Changeset 4271


Timestamp: Feb 20, 2007, 3:22:54 PM
Author: duncan
Message: working on updated urs2sww
Location: anuga_core/source/anuga/shallow_water
Files: 2 edited

  • anuga_core/source/anuga/shallow_water/data_manager.py

--- anuga_core/source/anuga/shallow_water/data_manager.py (r4268)
+++ anuga_core/source/anuga/shallow_water/data_manager.py (r4271)
@@ -62 +62 @@
 from struct import unpack
 import array as p_array
+#import time, os
+from os import sep, path
 
 from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
@@ -68 +70 @@
 
 
-from anuga.coordinate_transforms.redfearn import redfearn
+from anuga.coordinate_transforms.redfearn import redfearn, \
+     convert_from_latlon_to_utm
 from anuga.coordinate_transforms.geo_reference import Geo_reference
 from anuga.geospatial_data.geospatial_data import Geospatial_data
 from anuga.config import minimum_storable_height as default_minimum_storable_height
-from anuga.utilities.numerical_tools import ensure_numeric
+from anuga.utilities.numerical_tools import ensure_numeric,  mean
 from anuga.caching.caching import myhash
 from anuga.utilities.anuga_exceptions import ANUGAError
-
+from anuga.pmesh.mesh import Mesh
+from anuga.shallow_water import Domain
+from anuga.abstract_2d_finite_volumes.pmesh2domain import \
+     pmesh_to_domain_instance
+
 def make_filename(s):
     """Transform argument string into a suitable filename
@@ -3564 +3571 @@
     """
 
-    from shallow_water import Domain
-    from anuga.abstract_2d_finite_volumes.pmesh2domain import pmesh_to_domain_instance
-    import time, os
-    #from data_manager import get_dataobject
-    from os import sep, path
-    from anuga.utilities.numerical_tools import mean
 
     if verbose == True:print 'Creating domain from', filename
@@ -3636 +3637 @@
     # go in to the bath dir and load the only file,
     bath_files = os.listdir(bath_dir)
-    #print "bath_files",bath_files
-
-    #fixme: make more general?
+
     bath_file = bath_files[0]
     bath_dir_file =  bath_dir + os.sep + bath_file
     bath_metadata,bath_grid =  _read_asc(bath_dir_file)
-    #print "bath_metadata",bath_metadata
-    #print "bath_grid",bath_grid
 
     #Use the date.time of the bath file as a basis for
@@ -3652 +3649 @@
     elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
                          + base_start
-    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
-    #print "elevation_dir_file",elevation_dir_file
-    #print "elevation_grid", elevation_grid
 
     elevation_files = os.listdir(elevation_dir)
@@ -3662 +3656 @@
     # the first elevation file should be the
     # file with the same base name as the bath data
-    #print "elevation_files",elevation_files
-    #print "'el' + base_start",'el' + base_start
     assert elevation_files[0] == 'el' + base_start
-
-    #assert bath_metadata == elevation_metadata
-
-
 
     number_of_latitudes = bath_grid.shape[0]
     number_of_longitudes = bath_grid.shape[1]
-    #number_of_times = len(os.listdir(elevation_dir))
-    #number_of_points = number_of_latitudes*number_of_longitudes
     number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
 
@@ -3684 +3670 @@
     latitudes.reverse()
 
-    #print "latitudes - before _get_min_max_indexes",latitudes
     kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
                                                  minlat=minlat, maxlat=maxlat,
@@ -3691 +3676 @@
 
     bath_grid = bath_grid[kmin:kmax,lmin:lmax]
-    #print "bath_grid",bath_grid
     latitudes = latitudes[kmin:kmax]
     longitudes = longitudes[lmin:lmax]
@@ -3699 +3683 @@
     number_of_points = number_of_latitudes*number_of_longitudes
     number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
-    #print "number_of_points",number_of_points
 
     #Work out the times
@@ -3709 +3692 @@
     else:
         times = [0.0]
-    #print "times", times
-    #print "number_of_latitudes", number_of_latitudes
-    #print "number_of_longitudes", number_of_longitudes
-    #print "number_of_times", number_of_times
-    #print "latitudes", latitudes
-    #print "longitudes", longitudes
 
 
@@ -3849 +3826 @@
     outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
 
-    # do this to create an ok sww file.
-    #outfile.variables['time'][:] = [0]   #Store time relative
-    #outfile.variables['stage'] = z
-    # put the next line up in the code after outfile.order = 1
-    #number_of_times = 1
-
     stage = outfile.variables['stage']
     xmomentum = outfile.variables['xmomentum']
@@ -3871 +3842 @@
         _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
 
-        #print "elevation_grid",elevation_grid
         #cut matrix to desired size
         elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
         u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
         v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
-        #print "elevation_grid",elevation_grid
+
        # handle missing values
         missing = (elevation_grid == elevation_meta['NODATA_value'])
@@ -4553 +4523 @@
             zscale=1,
             fail_on_NaN=True,
-            NaN_filler=0,
-            elevation=None):
-
-    # Read in urs files
-    # create a mesh from the selected points
-    # Do some conversions
-
+            NaN_filler=0):
+    """
+    parameters not used!
+    NaN_filler
+    origin
+    #mint=None, maxt=None,
+    """
+    # Convert to utm
+
+    files_in = [basename_in+'-z-mux',
+                basename_in+'-e-mux',
+                basename_in+'-n-mux']
+    quantities = ['HA','UA','VA']
+
+    # instanciate urs_points of the three mux files.
+    mux = {}
+    quality_slices = {}
+    for quantity, file in map(None, quantities, files_in):
+        mux[quantity] = Urs_points(file)
+        quality_slices[quantity] = []
+        for slice in mux[quantity]:
+            quality_slices[quantity].append(slice)
+
+
+    # FIXME: check that the depth is the same. (hashing)
+
+    # handle to a mux file to do depth stuff
+    a_mux = mux[quantities[0]]
+
+    # Convert to utm
+    lat = a_mux.lonlatdep[:,1]
+    long = a_mux.lonlatdep[:,0]
+    points_utm, zone = convert_from_latlon_to_utm( \
+        latitudes=lat, longitudes=long)
+    #print "points_utm", points_utm
+    #print "zone", zone
+
+    elevation = a_mux.lonlatdep[:,0] * -1 #
+
+    # grid ( create a mesh from the selected points)
+    mesh = Mesh()
+    mesh.add_vertices(points_utm)
+    mesh.auto_segment()
+    mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
+    mesh_dic = mesh.Mesh2MeshList()
+
+    #mesh.export_mesh_file(basename_in + '.tsh')
+
+    times = []
+    for i in range(a_mux.time_step_count):
+        times.append(a_mux.time_step * i)
+
+    points_utm=ensure_numeric(points_utm)
+    #print "mesh_dic['generatedpointlist']", mesh_dic['generatedpointlist']
+    #print "points_utm", points_utm
+    assert ensure_numeric(mesh_dic['generatedpointlist']) == \
+           ensure_numeric(points_utm)
+
+    volumes = mesh_dic['generatedtrianglelist']
+
+    # write sww intro and grid stuff.
     if basename_out is None:
         swwname = basename_in + '.sww'
@@ -4566 +4590 @@
 
     outfile = NetCDFFile(swwname, 'w')
+    # For a different way of doing this, check out tsh2sww
+    write_sww_header(outfile, times, len(volumes), len(points_utm))
+    write_sww_triangulation(outfile, points_utm, volumes,
+                            elevation, zone,  verbose=verbose)
+
+    write_sww_time_slice(outfile, quality_slices['HA'],
+                         quality_slices['UA'], quality_slices['VA'],
+                         elevation,
+                         mean_stage=mean_stage, zscale=zscale,
+                         verbose=verbose)
+    #
+    # Do some conversions while writing the sww file
+
+
 
 def write_sww_header(outfile, times, number_of_volumes, number_of_points ):
-
+    """
+    outfile - the name of the file that will be written
+    times - A list of the time slice times
+    number_of_volumes - the number of triangles
+    """
     number_of_times = len(times)
 
@@ -4618 +4660 @@
                             'number_of_points'))
 
-# Do this as a class
-def read_urs_points(urs_file):
-
-    columns = 3 # long, lat , depth
-    mux_file = open(file_in, 'rb')
-
-    # Number of points/stations
-    (points_num,)= unpack('i',mux_file.read(4))
-
-    # nt, int - Number of time steps
-    (time_step_count,)= unpack('i',mux_file.read(4))
-
-    #dt, float - time step, seconds
-    (time_step,) = unpack('f', mux_file.read(4))
-
-    msg = "Bad data in the mux file."
-    if points_num < 0:
-        mux_file.close()
-        raise ANUGAError, msg
-    if time_step_count < 0:
-        mux_file.close()
-        raise ANUGAError, msg
-    if time_step < 0:
-        mux_file.close()
-        raise ANUGAError, msg
-
-    lonlatdep = p_array.array('f')
-    lonlatdep.read(mux_file, columns * points_num)
-
 class Urs_points:
-
+    """
+    Read the info in URS mux files.
+
+    for the quantities heres a correlation between the file names and
+    what they mean;
+    z-mux is height above sea level, m
+    e-mux is velocity is Eastern direction, m/s
+    n-mux is velocity is Northern direction, m/s
+
+
+    """
     def __init__(self,urs_file):
-
+        self.iterated = False
         columns = 3 # long, lat , depth
         mux_file = open(urs_file, 'rb')
@@ -4673 +4696 @@
             mux_file.close()
             raise ANUGAError, msg
-
+
+        # the depth is in meters, and it is the distance from the ocean
+        # to the sea bottom.
         lonlatdep = p_array.array('f')
         lonlatdep.read(mux_file, columns * self.points_num)
@@ -4680 +4705 @@
         #print 'lonlatdep',lonlatdep
         self.lonlatdep = lonlatdep
+
+        self.mux_file = mux_file
         # check this array
 
     def __iter__(self):
         """
-        iterate over all data which is with respect to time.
+        iterate over quantity data which is with respect to time.
+
+        Note: You can only interate once over an object
+
+        returns quantity infomation for each time slice
         """
-        print "self.time_step_count",self.time_step_count
+        msg =  "You can only interate once over a urs file."
+        assert not self.iterated, msg
         self.time_step = 0
-        self.mux_file = self.mux_file
+        self.iterated = True
         return self
 
     def next(self):
-        print "self.time_step",self.time_step
         if self.time_step_count == self.time_step:
+            self.close()
             raise StopIteration
         #Read in a time slice  from mux file
         hz_p_array = p_array.array('f')
-        hz_p_array.read(self.mux_file, points_num)
+        hz_p_array.read(self.mux_file, self.points_num)
         hz_p = array(hz_p_array, typecode=Float)
-
         self.time_step += 1
+
+        return hz_p
 
     def close(self):
@@ -4706 +4739 @@
 
 
-
-
-
-"""
-def write_sww_triangulation(number_of_points, ):
+def write_sww_triangulation(outfile, points_utm, volumes,
+                            elevation, zone, verbose=False):
+
+    number_of_points = len(points_utm)
     #Store
     x = zeros(number_of_points, Float)  #Easting
     y = zeros(number_of_points, Float)  #Northing
 
-    if verbose: print 'Making triangular grid'
-    #Get zone of 1st point.
-    refzone, _, _ = redfearn(1st point..)
-
-    vertices = {}
-    i = 0
-    for k, lat in enumerate(latitudes):
-        for l, lon in enumerate(longitudes):
-
-            vertices[l,k] = i
-
-            zone, easting, northing = redfearn(lat,lon)
-
-            msg = 'Zone boundary crossed at longitude =', lon
-            #assert zone == refzone, msg
-            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
-            x[i] = easting
-            y[i] = northing
-            i += 1
-
-
-    #Construct 2 triangles per 'rectangular' element
-    volumes = []
-    for l in range(number_of_longitudes-1):    #X direction
-        for k in range(number_of_latitudes-1): #Y direction
-            v1 = vertices[l,k+1]
-            v2 = vertices[l,k]
-            v3 = vertices[l+1,k+1]
-            v4 = vertices[l+1,k]
-
-            #Note, this is different to the ferrit2sww code
-            #since the order of the lats is reversed.
-            volumes.append([v1,v3,v2]) #Upper element
-            volumes.append([v4,v2,v3]) #Lower element
-
     volumes = array(volumes)
 
-    geo_ref = Geo_reference(refzone,min(x),min(y))
+    geo_ref = Geo_reference(zone,min(x),min(y))
     geo_ref.write_NetCDF(outfile)
 
@@ -4769 +4766 @@
               %(min(y), max(y),
                 len(y))
+        print '    z in [%f, %f], len(z) == %d'\
+              %(min(z), max(z),
+                len(z))
         print 'geo_ref: ',geo_ref
-
-    z = resize(bath_grid,outfile.variables['z'][:].shape)
+        print '------------------------------------------------'
+
+    #z = resize(bath_grid,outfile.variables['z'][:].shape)
     outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
     outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
-    outfile.variables['z'][:] = z
-    outfile.variables['elevation'][:] = z  #FIXME HACK
+    outfile.variables['z'][:] = elevation
+    outfile.variables['elevation'][:] = elevation  #FIXME HACK
     outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
-"""
-
+
+
+    # FIXME: Don't store the whole speeds and stage in  memory.
+    # block it?
+def write_sww_time_slice(outfile, has, uas, vas, elevation,
+                         mean_stage=0, zscale=1,
+                         verbose=False):
+    #Time stepping
+    stage = outfile.variables['stage']
+    xmomentum = outfile.variables['xmomentum']
+    ymomentum = outfile.variables['ymomentum']
+
+    if verbose: print 'Converting quantities'
+    n = len(has)
+    j = 0
+
+    for ha, ua, va in map(None, has, uas, vas):
+        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
+        w = zscale*ha + mean_stage
+        stage[j] = w
+        h = w - elevation
+        xmomentum[j] = ua*h
+        ymomentum[j] = va*h
+        j += 1
+    outfile.close()
+
     #### END URS UNGRIDDED 2 SWW ###
 
 #-------------------------------------------------------------
 if __name__ == "__main__":
-    points=Urs_points('o_velocity-e-mux')
+    points=Urs_points('urs2sww_test/o_velocity-e-mux')
     for i in points:
+        print 'i',i
         pass
 
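Aside (not part of the changeset): a minimal sketch of how the Urs_points iterator and the reworked urs_ungridded2sww added above are meant to be driven, assuming the ANUGA package is importable and a set of <base>-z-mux/-e-mux/-n-mux files exists on disk ('project' below is a hypothetical base name). As the new docstring notes, a Urs_points object can only be iterated over once.

    # Sketch only: 'project-z-mux', 'project-e-mux' and 'project-n-mux' are assumed to exist.
    from anuga.shallow_water.data_manager import Urs_points, urs_ungridded2sww

    urs = Urs_points('project-z-mux')          # open one of the three mux files
    print 'stations:', urs.points_num
    print 'time steps:', urs.time_step_count, 'at', urs.time_step, 's each'
    for slice in urs:                          # one HA value per station, per time step
        print 'first station HA:', slice[0]
    # The object is now exhausted; a second iteration would trip the assert in __iter__.

    # Convert the full -z/-e/-n mux triplet into an unstructured .sww file.
    urs_ungridded2sww('project', verbose=True)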
  • anuga_core/source/anuga/shallow_water/test_data_manager.py

--- anuga_core/source/anuga/shallow_water/test_data_manager.py (r4254)
+++ anuga_core/source/anuga/shallow_water/test_data_manager.py (r4271)
@@ -4704 +4704 @@
                 quantities_init[2].append(e) # VA
         #print "lonlatdeps",lonlatdeps
+
+        file_handle, base_name = tempfile.mkstemp("")
+        os.close(file_handle)
+        os.remove(base_name)
+
+        files = []
+        for i,q in enumerate(quantities):
+            quantities_init[i] = ensure_numeric(quantities_init[i])
+            #print "HA_init", HA_init
+            q_time = zeros((time_step_count, points_num), Float)
+            for time in range(time_step_count):
+                q_time[time,:] = quantities_init[i] #* time * 4
+
+            #Write C files
+            columns = 3 # long, lat , depth
+            file = base_name + mux_names[i]
+            #print "base_name file",file
+            f = open(file, 'wb')
+            files.append(file)
+            f.write(pack('i',points_num))
+            f.write(pack('i',time_step_count))
+            f.write(pack('f',time_step))
+
+            #write lat/long info
+            for lonlatdep in lonlatdeps:
+                for float in lonlatdep:
+                    f.write(pack('f',float))
+
+            # Write quantity info
+            for time in  range(time_step_count):
+                for i in range(points_num):
+                    f.write(pack('f',q_time[time,i]))
+            f.close()
+        return base_name, files
+
+    def write_mux(self,lat_long_points, time_step_count, time_step):
+        """
+        This will write 3 non-gridded mux files, for testing.
+        The na and va quantities will be the Easting values.
+        Depth and ua will be the Northing value.
+        """
+        #print "lat_long_points", lat_long_points
+        #print "time_step_count",time_step_count
+        #print "time_step",
+
+        points_num = len(lat_long_points)
+        lonlatdeps = []
+        quantities = ['HA','UA','VA']
+        mux_names = ['-z-mux','-e-mux','-n-mux']
+        quantities_init = [[],[],[]]
+        # urs binary is latitude fastest
+        for point in lat_long_points:
+            lat = point[0]
+            lon = point[1]
+            _ , e, n = redfearn(lat, lon)
+            lonlatdeps.append([lon, lat, n])
+            quantities_init[0].append(e) # HA
+            quantities_init[1].append(n ) # UA
+            quantities_init[2].append(e) # VA
+
         file_handle, base_name = tempfile.mkstemp("")
         os.close(file_handle)
@@ -5121 +5181 @@
 
     #### END TESTS URS UNGRIDDED 2 SWW ###
+    def test_Urs_points(self):
+        time_step_count = 3
+        time_step = 2
+        lat_long_points =[(-21.5,114.5),(-21.5,115),(-21.,115)]
+        base_name, files = self.write_mux(lat_long_points,
+                                          time_step_count, time_step)
+        for file in files:
+            urs = Urs_points(file)
+            assert time_step_count == urs.time_step_count
+            assert time_step == urs.time_step
+
+            for lat_lon, dep in map(None, lat_long_points, urs.lonlatdep):
+                    _ , e, n = redfearn(lat_lon[0], lat_lon[1])
+                    assert allclose(n, dep[2])
+
+            count = 0
+            for slice in urs:
+                count += 1
+                #print slice
+                for lat_lon, quantity in map(None, lat_long_points, slice):
+                    _ , e, n = redfearn(lat_lon[0], lat_lon[1])
+                    #print "quantity", quantity
+                    #print "e", e
+                    #print "n", n
+                    if file[-5:] == 'z-mux' or file[-5:] == 'n-mux' :
+                        assert allclose(e, quantity)
+                    if file[-5:] == 'e-mux':
+                        assert allclose(n, quantity)
+            assert count == time_step_count
+
+        self.delete_mux(files)
+
+    def test_urs_ungridded2sww (self):
+
+        #Zone:   50
+        #Easting:  240992.578  Northing: 7620442.472
+        #Latitude:   -21  30 ' 0.00000 ''  Longitude: 114  30 ' 0.00000 ''
+        lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]
+        time_step_count = 2
+        time_step = 400
+        base_name, files = self.write_mux(lat_long,
+                                          time_step_count, time_step)
+        urs_ungridded2sww(base_name)
+
 
 #-------------------------------------------------------------
 if __name__ == "__main__":
     #suite = unittest.makeSuite(Test_Data_Manager,'test_URS_points_needed')
-    #suite = unittest.makeSuite(Test_Data_Manager,'dave_test_URS_points_needed')
-    #suite = unittest.makeSuite(Test_Data_Manager,'test_ferret2sww_lat_long')
+    #suite = unittest.makeSuite(Test_Data_Manager,'dave_test_URS_poinneeded')
+    #suite = unittest.makeSuite(Test_Data_Manager,'test_urs_')
     suite = unittest.makeSuite(Test_Data_Manager,'test')
     runner = unittest.TextTestRunner()
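Aside (not part of the changeset): the mux layout exercised by the test helper above and read back by Urs_points is an int point count, an int time-step count, a float time step, one lon/lat/depth triple of floats per station, and then one float per station per time step. A small stand-alone sketch of that layout, using only struct and array as the source does; the helper names are hypothetical.

    # Hypothetical helpers mirroring the layout written by write_mux and read by Urs_points.
    from struct import pack, unpack
    import array as p_array

    def write_tiny_mux(filename, lonlatdeps, slices, time_step):
        f = open(filename, 'wb')
        f.write(pack('i', len(lonlatdeps)))     # number of points/stations
        f.write(pack('i', len(slices)))         # number of time steps
        f.write(pack('f', time_step))           # time step, seconds
        for lon, lat, dep in lonlatdeps:        # lon, lat, depth per station
            f.write(pack('fff', lon, lat, dep))
        for slice in slices:                    # one value per station, per time step
            for value in slice:
                f.write(pack('f', value))
        f.close()

    def read_tiny_mux(filename):
        f = open(filename, 'rb')
        (points_num,) = unpack('i', f.read(4))
        (time_step_count,) = unpack('i', f.read(4))
        (time_step,) = unpack('f', f.read(4))
        lonlatdep = p_array.array('f')
        lonlatdep.read(f, 3 * points_num)       # the lon/lat/depth block
        slices = []
        for t in range(time_step_count):
            block = p_array.array('f')
            block.read(f, points_num)           # one quantity slice
            slices.append(list(block))
        f.close()
        return lonlatdep, slices, time_step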