"""Convert URS mux2 wave propagation output to ANUGA sts format.

The urs mux2 file names must be of the form:

    [basename_in]_waveheight-z-mux2
    [basename_in]_velocity-e-mux2
    [basename_in]_velocity-n-mux2
"""

import os
from struct import unpack

from anuga.utilities.anuga_exceptions import ANUGAError
from Numeric import Float, Float32, Int, zeros, array, resize, compress, \
     transpose, ArrayType
from anuga.utilities.numerical_tools import ensure_numeric
from anuga.config import max_float
from anuga.coordinate_transforms.geo_reference import Geo_reference, \
     write_NetCDF_georeference, ensure_geo_reference


def read_binary_mux2(file_in, verbose=False):
    """Demultiplex a small format (mux2) file.

    Based upon demux_compact_tgdata.c written by C. Thomas,
    Geoscience Australia 2007

    Author: John Jakeman

    Unlike the c code this program only reads in one file (source).
    """
    eps = 0.00001

    ##################
    # Open mux2 File #
    ##################
    if os.access(file_in, os.F_OK) == 0:
        msg = 'File %s does not exist or is not accessible' % file_in
        raise IOError, msg

    if file_in[-4:] != "mux2":
        msg = "This program operates only on multiplexed files in mux2 format\n"
        msg += "At least one input file name does not end with mux2"
        raise IOError, msg

    mux_file = open(file_in, 'rb')

    ######################
    # Read in the header #
    ######################

    # Number of points/stations
    (num_sites,) = unpack('i', mux_file.read(4))

    geolat = zeros(num_sites, Float)
    geolon = zeros(num_sites, Float)
    depth = zeros(num_sites, Float)

    dt_old = 0.0
    nt_old = 0.0
    for i in range(num_sites):
        # Location and water depth in geographic coordinates
        (geolat[i],) = unpack('f', mux_file.read(4))    # latitude (-90/90)
        (geolon[i],) = unpack('f', mux_file.read(4))    # longitude (-180/180)
        (mcolat,) = unpack('f', mux_file.read(4))
        (mcolon,) = unpack('f', mux_file.read(4))
        (ig,) = unpack('i', mux_file.read(4))
        (ilon,) = unpack('i', mux_file.read(4))         # grid point location
        (ilat,) = unpack('i', mux_file.read(4))
        (depth[i],) = unpack('f', mux_file.read(4))
        (centerlat,) = unpack('f', mux_file.read(4))
        (centerlon,) = unpack('f', mux_file.read(4))
        (offset,) = unpack('f', mux_file.read(4))
        (az,) = unpack('f', mux_file.read(4))
        (baz,) = unpack('f', mux_file.read(4))
        (dt,) = unpack('f', mux_file.read(4))           # time step, seconds
        (nt,) = unpack('i', mux_file.read(4))           # number of time steps
        for j in range(4):
            # identifier
            (id,) = unpack('f', mux_file.read(4))
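        # The per-station header record just read consists of, in file order:
        # geographic latitude and longitude, model cell latitude/longitude
        # (mcolat/mcolon), grid index ig and grid point location ilon/ilat,
        # water depth, centre latitude/longitude, offset, azimuth (az),
        # back-azimuth (baz), time step dt, number of time steps nt, and
        # four identifier words.  (Field meanings are inferred from the
        # variable names used in the reads above.)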
        msg = "Bad data in the mux file."
        if num_sites < 0:
            mux_file.close()
            raise ANUGAError, msg
        if dt < 0:
            mux_file.close()
            raise ANUGAError, msg
        if nt < 0:
            mux_file.close()
            raise ANUGAError, msg

        if verbose:
            print "num_stations:", num_sites
            print "station number:", i
            print "geolat:", geolat[i]
            print "geolon:", geolon[i]
            print "mcolat:", mcolat
            print "mcolon:", mcolon
            print "ig:", ig
            print "ilon:", ilon
            print "ilat:", ilat
            print "depth:", depth[i]
            print "centerlat:", centerlat
            print "centerlon:", centerlon
            print "offset:", offset
            print "az:", az
            print "baz:", baz
            print "tstep:", dt
            print "num_tsteps:", nt
            print

        if i > 0:
            msg = "Stations have different time step size"
            assert dt == dt_old, msg
            msg = "Stations have different time series length"
            assert nt == nt_old, msg
        dt_old = dt
        nt_old = nt

    ################
    # Read in Data #
    ################

    # Arrays holding the first and last output step recorded at each station
    first_tstep = zeros(num_sites, Int)
    last_tstep = zeros(num_sites, Int)

    # Read the start and stop steps for each site
    for i in range(num_sites):
        (first_tstep[i],) = unpack('i', mux_file.read(4))
    for i in range(num_sites):
        (last_tstep[i],) = unpack('i', mux_file.read(4))

    # Compute the size of the data block
    numDataTotal = 0
    numData = zeros(num_sites, Int)
    last_output_step = 0
    for i in range(num_sites):
        numData[i] = last_tstep[i] - first_tstep[i] + 1
        numDataTotal += numData[i]
        last_output_step = max(last_output_step, last_tstep[i])

    # The data block holds one extra (leading) record per output step
    numDataTotal += last_output_step

    msg = "Size of Data Block is negative"
    assert numDataTotal >= 0, msg

    muxData = zeros(numDataTotal, Float)
    for i in range(numDataTotal):
        (muxData[i],) = unpack('f', mux_file.read(4))

    ##################
    # Store Mux data #
    ##################

    data = zeros((num_sites, nt), Float)
    maximum = zeros(num_sites, Float)

    # Find when the first station starts recording
    min_tstep = min(first_tstep)  # not necessary
    # Find when all stations have stopped recording
    max_tstep = max(last_tstep)

    times = zeros(max_tstep, Float)
    beg = dt

    for i in range(num_sites):
        if verbose:
            print 'reading site ' + str(i)

        istart = -1
        istop = -1
        offset = 0

        # Update start and stop timesteps for this gauge
        if istart == -1:
            istart = first_tstep[i]
        else:
            istart = min(first_tstep[i], istart)
        if istop == -1:
            istop = last_tstep[i]
        else:
            istop = min(last_tstep[i], istop)

        # For every output step the data block stores one leading record
        # followed by one value for each gauge recording at that step.
        # For gauge i the whole block is walked, skipping the leading
        # record and the values belonging to the other gauges.
        for t in range(max_tstep):
            last_t = t
            offset += 1

            # Skip records from earlier tide gauges
            for j in range(i):
                if (t+1 >= first_tstep[j]) and (t+1 <= last_tstep[j]):
                    offset += 1

            # Deal with the tide gauge at hand
            if (t+1 >= first_tstep[i]) and (t+1 <= last_tstep[i]):
                # Gauge is recording at this time
                data[i][t] = muxData[offset]
                offset += 1
                if data[i][t] > eps:
                    maximum[i] = max(data[i][t], maximum[i])

            # Skip records from later tide gauges
            for j in range(i+1, num_sites):
                if (t+1 >= first_tstep[j]) and (t+1 <= last_tstep[j]):
                    offset += 1

            if i == 0:
                times[t] = beg + t*dt

        msg = "Mux file corrupted. No positive height found at station " + str(i)
        assert maximum[i] > 0, msg

    mux_file.close()

    return times, geolat, geolon, -depth, data


class NewQuantity(Exception):
    """Raised when a quantity not handled by the sts format is supplied."""
    pass


class Write_sts:
    """Helper class for writing .sts NetCDF files."""

    sts_quantities = ['stage']

    RANGE = '_range'
    EXTREMA = ':extrema'

    def __init__(self):
        pass

    def store_header(self,
                     outfile,
                     times,
                     number_of_points,
                     description='Converted from URS mux2 format',
                     sts_precision=Float32,
                     verbose=False):
        """Write the header of the .sts NetCDF file.

        outfile - handle to the NetCDF file that will be written
        times   - A list of the time slice times OR a start time
                  Note, if a list is given the info will be made relative.
        number_of_points - the number of urs gauge sites
        """
        outfile.institution = 'Geoscience Australia'
        outfile.description = description

        try:
            # get_revision_number lives in anuga.utilities.system_tools;
            # fall back to None if it is unavailable.
            from anuga.utilities.system_tools import get_revision_number
            revision_number = get_revision_number()
        except:
            revision_number = None
        # Allow None to be stored as a string
        outfile.revision_number = str(revision_number)

        # times - A list or array of the time slice times OR a start time
        # Start time in seconds since the epoch (midnight 1/1/1970)

        # Distinguish a list/array of times from a single start time
        if type(times) is list or type(times) is ArrayType:
            number_of_times = len(times)
            times = ensure_numeric(times)
            if number_of_times == 0:
                starttime = 0
            else:
                starttime = times[0]
                times = times - starttime  # Store relative times
        else:
            number_of_times = 0
            starttime = times

        outfile.starttime = starttime

        # Dimension definitions
        outfile.createDimension('number_of_points', number_of_points)
        outfile.createDimension('number_of_timesteps', number_of_times)
        outfile.createDimension('numbers_in_range', 2)

        # Variable definitions
        outfile.createVariable('x', sts_precision, ('number_of_points',))
        outfile.createVariable('y', sts_precision, ('number_of_points',))
        outfile.createVariable('elevation', sts_precision, ('number_of_points',))

        q = 'elevation'
        outfile.createVariable(q + Write_sts.RANGE, sts_precision,
                               ('numbers_in_range',))

        # Initialise ranges with small and large sentinels.
        # If this was in pure Python we could have used None sensibly
        outfile.variables[q + Write_sts.RANGE][0] = max_float   # Min
        outfile.variables[q + Write_sts.RANGE][1] = -max_float  # Max

        outfile.createVariable('z', sts_precision, ('number_of_points',))
        # Doing sts_precision instead of Float gives cast errors.
        outfile.createVariable('time', Float, ('number_of_timesteps',))

        for q in Write_sts.sts_quantities:
            outfile.createVariable(q, sts_precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))
            outfile.createVariable(q + Write_sts.RANGE, sts_precision,
                                   ('numbers_in_range',))

            # Initialise ranges with small and large sentinels.
            # If this was in pure Python we could have used None sensibly
            outfile.variables[q + Write_sts.RANGE][0] = max_float   # Min
            outfile.variables[q + Write_sts.RANGE][1] = -max_float  # Max

        if type(times) is list or type(times) is ArrayType:
            outfile.variables['time'][:] = times  # Store time relative

        if verbose:
            print '------------------------------------------------'
            print 'Statistics:'
            print '    t in [%f, %f], len(t) == %d'\
                  %(min(times.flat), max(times.flat), len(times.flat))

    def store_points(self,
                     outfile,
                     points_utm,
                     elevation,
                     zone=None,
                     new_origin=None,
                     points_georeference=None,
                     verbose=False):
        """Write the gauge locations and elevations to the .sts file.

        points_utm - currently a list or array of the points in UTM.
        points_georeference - the georeference of the points_utm

        How about passing new_origin and current_origin.
        If you get both, do a conversion from the old to the new.

        If you only get new_origin, the points are absolute;
        convert to relative.

        If you only get the current_origin, the points are relative;
        store as relative.

        If you get no georefs, create a new georef based on the minimums of
        points_utm.  (Another option would be to default to absolute.)
        Yes, and this is done in another part of the code.
        Probably geospatial.

        If you don't supply either geo_ref, then supply a zone.
        If not, the default zone will be used.

        precondition: header has been called.
""" number_of_points = len(points_utm) points_utm = array(points_utm) # given the two geo_refs and the points, do the stuff # described in the method header points_georeference = ensure_geo_reference(points_georeference) new_origin = ensure_geo_reference(new_origin) if new_origin is None and points_georeference is not None: points = points_utm geo_ref = points_georeference else: if new_origin is None: new_origin = Geo_reference(zone,min(points_utm[:,0]), min(points_utm[:,1])) points = new_origin.change_points_geo_ref(points_utm, points_georeference) geo_ref = new_origin # At this stage I need a georef and points # the points are relative to the georef geo_ref.write_NetCDF(outfile) x = points[:,0] y = points[:,1] z = outfile.variables['z'][:] if verbose: print '------------------------------------------------' print 'More Statistics:' print ' Extent (/lon):' print ' x in [%f, %f], len(lat) == %d'\ %(min(x), max(x), len(x)) print ' y in [%f, %f], len(lon) == %d'\ %(min(y), max(y), len(y)) print ' z in [%f, %f], len(z) == %d'\ %(min(elevation), max(elevation), len(elevation)) print 'geo_ref: ',geo_ref print '------------------------------------------------' #z = resize(bath_grid,outfile.variables['z'][:].shape) #print "points[:,0]", points[:,0] outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner() outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner() outfile.variables['z'][:] = elevation outfile.variables['elevation'][:] = elevation #FIXME HACK4 q = 'elevation' # This updates the _range values outfile.variables[q+Write_sts.RANGE][0] = min(elevation) outfile.variables[q+Write_sts.RANGE][1] = max(elevation) def store_quantities(self, outfile, sts_precision=Float32, slice_index=None, time=None, verbose=False, **quant): """ Write the quantity info. **quant is extra keyword arguments passed in. These must be the sts quantities, currently; stage. if the time array is already been built, use the slice_index to specify the index. Otherwise, use time to increase the time dimension Maybe make this general, but the viewer assumes these quantities, so maybe we don't want it general - unless the viewer is general precondition: triangulation and header have been called. """ if time is not None: file_time = outfile.variables['time'] slice_index = len(file_time) file_time[slice_index] = time # Write the conserved quantities from Domain. # Typically stage, xmomentum, ymomentum # other quantities will be ignored, silently. # Also write the ranges: stage_range for q in Write_sts.sts_quantities: if not quant.has_key(q): msg = 'STS file can not write quantity %s' %q raise NewQuantity, msg else: q_values = quant[q] outfile.variables[q][slice_index] = \ q_values.astype(sts_precision) # This updates the _range values q_range = outfile.variables[q+Write_sts.RANGE][:] q_values_min = min(q_values) if q_values_min < q_range[0]: outfile.variables[q+Write_sts.RANGE][0] = q_values_min q_values_max = max(q_values) if q_values_max > q_range[1]: outfile.variables[q+Write_sts.RANGE][1] = q_values_max def urs2sts(basename_in, basename_out = None, verbose = False, origin = None, mean_stage=0.0,zscale=1.0, minlat = None, maxlat = None, minlon = None, maxlon = None): """Convert URS mux2 format for wave propagation to sts format Specify only basename_in and read files of the form out_waveheight-z-mux2 Also convert latitude and longitude to UTM. 
def urs2sts(basename_in, basename_out=None,
            verbose=False,
            origin=None,
            mean_stage=0.0,
            zscale=1.0,
            minlat=None, maxlat=None,
            minlon=None, maxlon=None):
    """Convert URS mux2 wave propagation output to sts format.

    basename_in is the name of a single mux2 file to read, e.g. a file of
    the form [x]_waveheight-z-mux2 (the full file name, ending in mux2,
    is expected).

    Latitude and longitude are converted to UTM.

    All coordinates are assumed to be given in the GDA94 datum.

    origin is a 3-tuple with geo referenced UTM coordinates
    (zone, easting, northing)
    """
    from Scientific.IO.NetCDF import NetCDFFile

    precision = Float

    msg = 'minlat, maxlat, minlon and maxlon must be given as latitudes/longitudes'
    if minlat is not None:
        assert -90 < minlat < 90, msg
    if maxlat is not None:
        assert -90 < maxlat < 90, msg
    if minlat is not None and maxlat is not None:
        assert maxlat > minlat
    if minlon is not None:
        assert -180 < minlon < 180, msg
    if maxlon is not None:
        assert -180 < maxlon < 180, msg
    if minlon is not None and maxlon is not None:
        assert maxlon > minlon

    if basename_out is None:
        stsname = basename_in + '.sts'
    else:
        stsname = basename_out + '.sts'

    if verbose:
        print 'reading mux2 file'
    times_urs, latitudes_urs, longitudes_urs, elevations, urs_stage = \
        read_binary_mux2(basename_in, verbose)

    if (minlat is not None) and (minlon is not None) and \
       (maxlat is not None) and (maxlon is not None):
        # Keep only the stations that fall inside the bounding box
        inside = (latitudes_urs >= minlat) & (latitudes_urs <= maxlat) & \
                 (longitudes_urs >= minlon) & (longitudes_urs <= maxlon)
        latitudes = compress(inside, latitudes_urs)
        longitudes = compress(inside, longitudes_urs)
        elevations = compress(inside, elevations)
        urs_stage = compress(inside, urs_stage, 0)  # filter stations (axis 0)
    else:
        latitudes = latitudes_urs
        longitudes = longitudes_urs

    # All stations share the same time vector
    times = times_urs

    number_of_points = latitudes.shape[0]
    number_of_times = times.shape[0]
    number_of_latitudes = latitudes.shape[0]
    number_of_longitudes = longitudes.shape[0]

    # NetCDF file definition
    outfile = NetCDFFile(stsname, 'w')

    description = 'Converted from URS mux2 files: %s' % basename_in

    # Create new file
    starttime = times[0]
    sts = Write_sts()
    sts.store_header(outfile, times, number_of_points,
                     description=description,
                     verbose=verbose, sts_precision=Float)

    # Store gauge locations as UTM eastings/northings
    from anuga.coordinate_transforms.redfearn import redfearn
    x = zeros(number_of_points, Float)  # Easting
    y = zeros(number_of_points, Float)  # Northing

    # Check zone boundaries
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    for i in range(number_of_points):
        zone, easting, northing = redfearn(latitudes[i], longitudes[i])
        x[i] = easting
        y[i] = northing
        msg = 'All sites must fall within the same UTM zone'
        assert zone == refzone, msg

    if origin is None:
        origin = Geo_reference(refzone, min(x), min(y))
    geo_ref = write_NetCDF_georeference(origin, outfile)

    z = elevations
    z = resize(z, outfile.variables['z'][:].shape)

    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z    #FIXME HACK for backwards compat.
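    # 'elevation' below duplicates 'z'; the 'z' variable above is only kept
    # so that older readers of the file format keep working (see the FIXME).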
    outfile.variables['elevation'][:] = z

    stage = outfile.variables['stage']
    for j in range(len(times)):
        for i in range(number_of_points):
            w = zscale*urs_stage[i, j] + mean_stage
            stage[j, i] = w

    outfile.close()


def read_sts(filename):
    """Quick diagnostic read of an .sts file: collect its quantities and
    plot the gauge locations."""
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import asarray

    fid = NetCDFFile(filename + '.sts', 'r')    # Open existing file for read
    time = fid.variables['time']                # Timesteps

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of vertices
    y = fid.variables['y'][:]                   # y-coordinates of vertices
    elevation = fid.variables['elevation']      # Elevation
    stage = fid.variables['stage']              # Water level

    starttime = fid.starttime[0]
    coordinates = transpose(asarray([x.tolist(), y.tolist()]))

    conserved_quantities = []
    other_quantities = []
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
        else:
            other_quantities.append(quantity)

    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    try:
        other_quantities.remove('stage_range')
        other_quantities.remove('elevation_range')
    except:
        pass

    from pylab import plot, show
    plot(x/1000, y/1000, 'o')
    show()

    fid.close()


#def plot_sts_gauges():


if __name__ == "__main__":
    file_in = "big_out_waveheight-z-mux2"
    file_out = "big"
    east = 99
    west = 98
    north = 2
    south = 0

    urs2sts(file_in, file_out, verbose=False,
            minlat=south, maxlat=north,
            minlon=west, maxlon=east)
    read_sts(file_out)
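
# Illustrative sketch (not part of the original module): read back the stage
# time series for a single gauge from an .sts file written by urs2sts above.
# The variable names ('time', 'stage') are the ones created by
# Write_sts.store_header; the helper name extract_gauge_timeseries is
# hypothetical.
def extract_gauge_timeseries(stsname, gauge_index=0):
    """Illustration only: return (times, stage) for one gauge from <stsname>.sts."""
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(stsname + '.sts', 'r')
    times = fid.variables['time'][:]    # relative to fid.starttime
    stage = fid.variables['stage'][:]   # shape (number_of_timesteps, number_of_points)
    fid.close()

    return times, stage[:, gauge_index]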