Timestamp:
Oct 10, 2006, 11:02:18 AM
Author:
duncan
Message:

added urs2sww to convert urs tsunami info to an sww boundary

File:
1 edited

  • anuga_core/source/anuga/shallow_water/data_manager.py

    r3694 r3720  
import csv
import os
from struct import unpack
import array as p_array

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around, reshape
from Scientific.IO.NetCDF import NetCDFFile



from anuga.geospatial_data.geospatial_data import Geospatial_data
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.utilities.numerical_tools import ensure_numeric
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError

def make_filename(s):
     
    return

    ####  URS 2 SWW  ###

lon_name = 'LON'
lat_name = 'LAT'
time_name = 'TIME'
precision = Float # So if we want to change the precision it's done here
class Write_nc:
    """
    Write an nc file.

    Note, this should be checked to meet cdc netcdf conventions for gridded
    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml

    """
    def __init__(self,
                 quantity_name,
                 file_name,
                 time_step_count,
                 time_step,
                 lon,
                 lat):
        """
        time_step_count is the number of time steps.
        time_step is the time step size.

        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
        """
        self.quantity_name = quantity_name
        quantity_units = {'HA': 'METERS',
                          'UA': 'METERS/SECOND',
                          'VA': 'METERS/SECOND'}

        #self.file_name = file_name
        self.time_step_count = time_step_count
        self.time_step = time_step

        # NetCDF file definition
        self.outfile = NetCDFFile(file_name, 'w')
        outfile = self.outfile

        #Create new file
        nc_lon_lat_header(outfile, lon, lat)

        # TIME
        outfile.createDimension(time_name, None)
        outfile.createVariable(time_name, precision, (time_name,))

        #QUANTITY
        outfile.createVariable(self.quantity_name, precision,
                               (time_name, lat_name, lon_name))
        outfile.variables[self.quantity_name].missing_value = -1.e+034
        outfile.variables[self.quantity_name].units = \
                                 quantity_units[self.quantity_name]
        outfile.variables[lon_name][:] = ensure_numeric(lon)
        outfile.variables[lat_name][:] = ensure_numeric(lat)

        #Assume no one will be wanting to read this, while we are writing
        #outfile.close()

    def store_timestep(self, quantity_slice):
        """
        quantity_slice is the data to be stored at this time step
        """

        outfile = self.outfile

        # Get the variables
        time = outfile.variables[time_name]
        quantity = outfile.variables[self.quantity_name]

        i = len(time)

        #Store time
        time[i] = i*self.time_step #self.domain.time
        quantity[i,:] = quantity_slice*100 # To convert from m to cm
                                           # and m/s to cm/sec

    def close(self):
        self.outfile.close()

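A minimal usage sketch of Write_nc, assuming it is called from within data_manager; the file name, grid and slice values below are invented, and HA slices are supplied in metres (store_timestep converts them to centimetres):

    from Numeric import zeros, Float

    lon = [150.0, 150.5, 151.0]              # degrees east, ascending
    lat = [-35.0, -34.5]                     # degrees north, ascending
    writer = Write_nc('HA', 'example_ha.nc',
                      time_step_count=4, time_step=0.5,
                      lon=lon, lat=lat)
    for n in range(4):
        # one (lat x lon) slice of wave heights in metres per time step
        writer.store_timestep(zeros((len(lat), len(lon)), Float))
    writer.close()
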
def urs2sww(basename_in='o', basename_out=None, verbose=False,
            remove_nc_files=True,
            minlat=None, maxlat=None,
            minlon=None, maxlon=None,
            mint=None, maxt=None,
            mean_stage=0,
            zscale=1,
            fail_on_NaN=True,
            NaN_filler=0,
            elevation=None):
    """
    Convert URS C binary format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename-z-mux, basefilename-e-mux and basefilename-n-mux containing
    relative height, x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is for a grid.

    min's and max's: If omitted, the full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.

    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE,
    which means that latitude is the fastest
    varying dimension (row major order, so to speak).

    In URS C binary the latitudes and longitudes are in ascending order.
    """

    if basename_out == None:
        basename_out = basename_in
    files_out = urs2nc(basename_in, basename_out)
    ferret2sww(basename_out,
               minlat=minlat,
               maxlat=maxlat,
               minlon=minlon,
               maxlon=maxlon,
               mint=mint,
               maxt=maxt,
               mean_stage=mean_stage,
               zscale=zscale,
               fail_on_NaN=fail_on_NaN,
               NaN_filler=NaN_filler,
               inverted_bathymetry=True,
               verbose=verbose)
    #print "files_out",files_out
    if remove_nc_files:
        for file_out in files_out:
            os.remove(file_out)

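A sketch of a typical call, assuming mux files named tsunami-z-mux, tsunami-e-mux and tsunami-n-mux exist in the working directory (the basename and bounds are illustrative only):

    urs2sww(basename_in='tsunami', basename_out='tsunami',
            minlat=-36.0, maxlat=-34.0,
            minlon=150.0, maxlon=152.0,
            mean_stage=0.0, zscale=1,
            remove_nc_files=True, verbose=True)

This should leave tsunami.sww behind; the intermediate _ha.nc, _ua.nc, _va.nc and _e.nc files are removed again because remove_nc_files is True.
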
def urs2nc(basename_in='o', basename_out='urs'):
    files_in = [basename_in + '-z-mux',
                basename_in + '-e-mux',
                basename_in + '-n-mux']
    files_out = [basename_out + '_ha.nc',
                 basename_out + '_ua.nc',
                 basename_out + '_va.nc']
    quantities = ['HA', 'UA', 'VA']

    hashed_elevation = None
    for file_in, file_out, quantity in map(None, files_in,
                                           files_out,
                                           quantities):
        lonlatdep, lon, lat = _binary_c2nc(file_in,
                                           file_out,
                                           quantity)
        #print "lon",lon
        #print "lat",lat
        if hashed_elevation == None:
            elevation_file = basename_out + '_e.nc'
            write_elevation_sww(elevation_file,
                                lon,
                                lat,
                                lonlatdep[:,2])
            hashed_elevation = myhash(lonlatdep)
        else:
            msg = "The elevation information in the mux files is inconsistent"
            assert hashed_elevation == myhash(lonlatdep), msg
    files_out.append(elevation_file)
    return files_out

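For example (illustrative basename), the returned list covers the three quantity files plus the shared elevation grid:

    # reads tsunami-z-mux, tsunami-e-mux and tsunami-n-mux and returns
    # ['tsunami_ha.nc', 'tsunami_ua.nc', 'tsunami_va.nc', 'tsunami_e.nc']
    files_out = urs2nc(basename_in='tsunami', basename_out='tsunami')
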
def _binary_c2nc(file_in, file_out, quantity):
    """
    Convert one URS C binary mux file to an nc file.

    Return the depth info, so it can be written to a file.
    """
    columns = 3 # long, lat, depth
    f = open(file_in, 'rb')

    # Number of points/stations
    (points_num,) = unpack('i', f.read(4))

    # nt, int - Number of time steps
    (time_step_count,) = unpack('i', f.read(4))

    # dt, float - time step, seconds
    (time_step,) = unpack('f', f.read(4))

    msg = "Bad data in the mux file."
    if points_num < 0:
        raise ANUGAError, msg
    if time_step_count < 0:
        raise ANUGAError, msg
    if time_step < 0:
        raise ANUGAError, msg

    lonlatdep = p_array.array('f')
    lonlatdep.read(f, columns * points_num)
    lonlatdep = array(lonlatdep, typecode=Float)
    lonlatdep = reshape(lonlatdep, (points_num, columns))

    lon, lat = lon_lat2grid(lonlatdep)
    nc_file = Write_nc(quantity,
                       file_out,
                       time_step_count,
                       time_step,
                       lon,
                       lat)

    for i in range(time_step_count):
        #Read in a time slice
        hz_p_array = p_array.array('f')
        hz_p_array.read(f, points_num)
        hz_p = array(hz_p_array, typecode=Float)
        hz_p = reshape(hz_p, (len(lat), len(lon)))

        nc_file.store_timestep(hz_p)

    nc_file.close()

    return lonlatdep, lon, lat

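For reference, a sketch of the binary layout this reader expects, built here with struct.pack on invented values (a real mux file comes from the URS wave-propagation code):

    from struct import pack

    f = open('tiny-z-mux', 'wb')
    f.write(pack('i', 4))              # points_num: a 2 x 2 grid of stations
    f.write(pack('i', 1))              # time_step_count
    f.write(pack('f', 0.5))            # time_step in seconds
    # lon, lat, depth per station; latitude varies fastest and ascends
    f.write(pack('12f', 150.0, -35.0, 10.0,
                        150.0, -34.5, 12.0,
                        151.0, -35.0, 11.0,
                        151.0, -34.5, 13.0))
    # one time slice: a height per station
    f.write(pack('4f', 0.01, 0.02, 0.03, 0.04))
    f.close()
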

def write_elevation_sww(file_out, lon, lat, depth_vector):

    # NetCDF file definition
    outfile = NetCDFFile(file_out, 'w')

    #Create new file
    nc_lon_lat_header(outfile, lon, lat)

    # ELEVATION
    zname = 'ELEVATION'
    outfile.createVariable(zname, precision, (lat_name, lon_name))
    outfile.variables[zname].units = 'CENTIMETERS'
    outfile.variables[zname].missing_value = -1.e+034

    outfile.variables[lon_name][:] = ensure_numeric(lon)
    outfile.variables[lat_name][:] = ensure_numeric(lat)

    depth = reshape(depth_vector, (len(lat), len(lon)))
    #print "depth",depth
    outfile.variables[zname][:] = depth

    outfile.close()

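A small invented example of writing the elevation grid on its own (two longitudes by two latitudes, latitude varying fastest in depth_vector):

    write_elevation_sww('example_e.nc',
                        lon=[150.0, 151.0],
                        lat=[-35.0, -34.5],
                        depth_vector=[10.0, 12.0, 11.0, 13.0])
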
def nc_lon_lat_header(outfile, lon, lat):

    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from URS binary C'

    # Longitude
    outfile.createDimension(lon_name, len(lon))
    outfile.createVariable(lon_name, precision, (lon_name,))
    outfile.variables[lon_name].point_spacing = 'uneven'
    outfile.variables[lon_name].units = 'degrees_east'
    outfile.variables[lon_name].assignValue(lon)


    # Latitude
    outfile.createDimension(lat_name, len(lat))
    outfile.createVariable(lat_name, precision, (lat_name,))
    outfile.variables[lat_name].point_spacing = 'uneven'
    outfile.variables[lat_name].units = 'degrees_north'
    outfile.variables[lat_name].assignValue(lat)


def lon_lat2grid(long_lat_dep):
    """
    Given a list of points that are assumed to be on a grid,
    return the longs and lats of the grid.
    long_lat_dep is an array where each row is a position.
    The first column is longitudes.
    The second column is latitudes.
    """
    LONG = 0
    LAT = 1
    points_num = len(long_lat_dep)
    lat = [long_lat_dep[0][LAT]]
    this_rows_long = long_lat_dep[0][LONG]
    i = 1 # Index of long_lat_dep

    # Working out the lats
    while i < points_num and long_lat_dep[i][LONG] == this_rows_long:
        lat.append(long_lat_dep[i][LAT])
        i += 1

    lats_per_long = i
    long = [long_lat_dep[0][LONG]]
    # Working out the longs
    while i < points_num:
        msg = 'Data is not gridded.  It must be for this operation'
        assert long_lat_dep[i][LAT] == lat[i%lats_per_long], msg
        if i%lats_per_long == 0:
            long.append(long_lat_dep[i][LONG])
        i += 1

    msg = 'Out of range latitudes/longitudes'
    for l in lat: assert -90 < l < 90, msg
    for l in long: assert -180 < l < 180, msg

    return long, lat

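A tiny illustration of the gridded ordering this expects (invented values; latitude varies fastest):

    from Numeric import array, Float

    points = array([[150.0, -35.0, 10.0],
                    [150.0, -34.5, 12.0],
                    [151.0, -35.0, 11.0],
                    [151.0, -34.5, 13.0]], Float)
    long, lat = lon_lat2grid(points)
    # long should be [150.0, 151.0] and lat should be [-35.0, -34.5]
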
    ####  END URS 2 SWW  ###
#-------------------------------------------------------------
if __name__ == "__main__":