source: trunk/anuga_core/source/anuga/file_conversion/esri2sww.py @ 8780

Last change on this file since 8780 was 8780, checked in by steve, 11 years ago

Some changes to allow netcdf4 use

File size: 11.4 KB
# Module-level imports required by the names used below.  The module paths
# follow the ANUGA source tree layout; adjust them if your checkout differs.
import os

import numpy as num

import anuga.utilities.log as log
from anuga.config import netcdf_mode_w, netcdf_float, netcdf_int
from anuga.file.netcdf import NetCDFFile
from anuga.coordinate_transforms.redfearn import redfearn
from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.anuga_exceptions import DataMissingValuesError
from anuga.file_conversion.ferret2sww import get_min_max_indices

def esri2sww(bath_dir,
             elevation_dir,
             ucur_dir,
             vcur_dir,
             sww_file,
             minlat=None, maxlat=None,
             minlon=None, maxlon=None,
             zscale=1,
             mean_stage=0,
             fail_on_NaN=True,
             elevation_NaN_filler=0,
             bath_prefix='ba',
             elevation_prefix='el',
             verbose=False):
17    """
18    Produce an sww boundary file, from esri ascii data from CSIRO.
19
20    Also convert latitude and longitude to UTM. All coordinates are
21    assumed to be given in the GDA94 datum.
22
23    assume:
24    All files are in esri ascii format
25
26    4 types of information
27    bathymetry
28    elevation
29    u velocity
30    v velocity
31
32    Assumptions
33    The metadata of all the files is the same
34    Each type is in a seperate directory
35    One bath file with extention .000
36    The time period is less than 24hrs and uniform.
37    """

    if sww_file[-4:] != '.sww':
        raise IOError('Output file %s should be of type .sww.' % sww_file)

    # Change here if a different precision is needed
    precision = netcdf_float

    # Go into the bathymetry directory and load its single file
    bath_files = os.listdir(bath_dir)
    bath_file = bath_files[0]
    bath_dir_file = os.path.join(bath_dir, bath_file)
    bath_metadata, bath_grid = _read_asc(bath_dir_file)

    # Use the date/time suffix of the bathymetry file as the basis
    # for the start time of the other files
    base_start = bath_file[-12:]

    # Sort the file lists so timesteps are processed in order
    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)
    elevation_files.sort()
    ucur_files.sort()
    vcur_files.sort()

    # The first elevation file should have the same
    # date/time suffix as the bathymetry data
    assert elevation_files[0] == elevation_prefix + base_start

    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]

    longitudes = [bath_metadata['xllcorner'] + x*bath_metadata['cellsize']
                  for x in range(number_of_longitudes)]
    latitudes = [bath_metadata['yllcorner'] + y*bath_metadata['cellsize']
                 for y in range(number_of_latitudes)]

    # Reverse the latitudes so the first entry corresponds
    # to the first (northernmost) grid row
    latitudes.reverse()

    kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:], longitudes[:],
                                                 minlat=minlat, maxlat=maxlat,
                                                 minlon=minlon, maxlon=maxlon)

    bath_grid = bath_grid[kmin:kmax, lmin:lmax]
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)
    number_of_times = len(elevation_files)
    number_of_points = number_of_latitudes * number_of_longitudes
    number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2

    # Work out the times.  The last three characters of each elevation
    # filename encode the hour stamp; assume the time period is uniform
    # and less than 24 hours.
    if len(elevation_files) > 1:
        time_period = (int(elevation_files[1][-3:])
                       - int(elevation_files[0][-3:])) * 60*60
        times = [x*time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]

    if verbose:
        log.critical('------------------------------------------------')
        log.critical('Statistics:')
        log.critical('  Extent (lat/lon):')
        log.critical('    lat in [%f, %f], len(lat) == %d'
                     % (min(latitudes), max(latitudes), len(latitudes)))
        log.critical('    lon in [%f, %f], len(lon) == %d'
                     % (min(longitudes), max(longitudes), len(longitudes)))
        log.critical('    t in [%f, %f], len(t) == %d'
                     % (min(times), max(times), len(times)))

    ######### WRITE THE SWW FILE #############

    # NetCDF file definition
    outfile = NetCDFFile(sww_file, netcdf_mode_w)

    # Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'

    # For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    # Start time (relative to the first timestep, not the epoch)
    outfile.starttime = times[0]

    # Dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)
    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # Variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    # FIXME: Backwards compatibility
    #outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',
                                                   'number_of_vertices'))

    outfile.createVariable('time', precision, ('number_of_timesteps',))

    outfile.createVariable('stage', precision, ('number_of_timesteps',
                                                'number_of_points'))

    outfile.createVariable('xmomentum', precision, ('number_of_timesteps',
                                                    'number_of_points'))

    outfile.createVariable('ymomentum', precision, ('number_of_timesteps',
                                                    'number_of_points'))

    # Store the mesh coordinates (UTM eastings and northings)
    x = num.zeros(number_of_points, num.float)  # Easting
    y = num.zeros(number_of_points, num.float)  # Northing

    if verbose: log.critical('Making triangular grid')

    # Get the UTM zone of the 1st point
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):
            vertices[l,k] = i

            zone, easting, northing = redfearn(lat, lon)

            #msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' % (lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1

    # Construct 2 triangles per 'rectangular' grid cell
    volumes = []
    for l in range(number_of_longitudes-1):    # X direction
        for k in range(number_of_latitudes-1): # Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]
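
            # Corner layout of the cell (row k lies north of row k+1
            # because the latitudes were reversed above):
            #
            #    v2 ---- v4        v2 = NW corner, v4 = NE corner
            #     |  \    |
            #     |    \  |
            #    v1 ---- v3        v1 = SW corner, v3 = SE corner
            #
            # Both triangles below are listed counterclockwise.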

            # Note, this is different to the ferret2sww code
            # since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) # Upper element
            volumes.append([v4,v2,v3]) # Lower element

    volumes = num.array(volumes, num.int)

    geo_ref = Geo_reference(refzone, min(x), min(y))
    geo_ref.write_NetCDF(outfile)

    # This would put the geo ref in the middle:
    #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2., (max(y)+min(y))/2.)

    if verbose:
        log.critical('------------------------------------------------')
        log.critical('More Statistics:')
        log.critical('  Extent (easting/northing):')
        log.critical('    x in [%f, %f], len(x) == %d'
                     % (min(x), max(x), len(x)))
        log.critical('    y in [%f, %f], len(y) == %d'
                     % (min(y), max(y), len(y)))
        log.critical('geo_ref: %s' % str(geo_ref))

    z = num.resize(bath_grid, outfile.variables['elevation'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    # FIXME (Ole): Remove once viewer has been recompiled and changed
    #              to use elevation instead of z
    #outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z
    outfile.variables['volumes'][:] = volumes.astype(num.int32) # On Opteron 64

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   # Store time relative to start

    if verbose: log.critical('Converting quantities')

    n = number_of_times
    for j in range(number_of_times):
        # Load in the files for this timestep
        elevation_meta, elevation_grid = \
            _read_asc(os.path.join(elevation_dir, elevation_files[j]))

        _, u_momentum_grid = _read_asc(os.path.join(ucur_dir, ucur_files[j]))
        _, v_momentum_grid = _read_asc(os.path.join(vcur_dir, vcur_files[j]))

        # Cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax, lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax, lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax, lmin:lmax]

        # Handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if num.sometrue(missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values' \
                      % (elevation_files[j])
                raise DataMissingValuesError(msg)
            else:
                # Replace NODATA cells with the filler value
                elevation_grid = elevation_grid*(missing==0) \
                                 + missing*elevation_NaN_filler

        if verbose and j % ((n+10)//10) == 0:
            log.critical('  Doing %d of %d' % (j, n))

        i = 0
        for k in range(number_of_latitudes):      # Y direction
            for l in range(number_of_longitudes): # X direction
                w = zscale*elevation_grid[k,l] + mean_stage  # Stage
                stage[j,i] = w
                h = w - z[i]                                 # Water depth
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1

    outfile.close()
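

# A hypothetical usage sketch.  The directory names below are made up; each
# directory holds ESRI ASCII grids whose names share a common date/time
# suffix, with the bathymetry file ending in .000 and the last three
# characters of each elevation filename giving the hour stamp.
#
#     esri2sww(bath_dir='data/bathymetry',
#              elevation_dir='data/elevation',
#              ucur_dir='data/ucurrent',
#              vcur_dir='data/vcurrent',
#              sww_file='boundary.sww',
#              minlat=-21.0, maxlat=-20.0,   # optional cropping window
#              minlon=115.0, maxlon=116.0,
#              verbose=True)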


def _read_asc(filename, verbose=False):
280    """Read esri file from the following ASCII format (.asc)
281
282    Example:
283
284    ncols         3121
285    nrows         1800
286    xllcorner     722000
287    yllcorner     5893000
288    cellsize      25
289    NODATA_value  -9999
290    138.3698 137.4194 136.5062 135.5558 ..........
291
292    filename Path to the file to read.
293    verbose True if this function is to be verbose.
294    """

    datafile = open(filename)

    if verbose: log.critical('Reading DEM from %s' % filename)

    lines = datafile.readlines()
    datafile.close()

    if verbose: log.critical('Got %d lines' % len(lines))

    # Parse the six header lines
    ncols = int(lines.pop(0).split()[1].strip())
    nrows = int(lines.pop(0).split()[1].strip())
    xllcorner = float(lines.pop(0).split()[1].strip())
    yllcorner = float(lines.pop(0).split()[1].strip())
    cellsize = float(lines.pop(0).split()[1].strip())
    NODATA_value = float(lines.pop(0).split()[1].strip())

    assert len(lines) == nrows

    # Store data
    grid = []

    for line in lines:
        cells = line.split()
        assert len(cells) == ncols
        grid.append(num.array([float(x) for x in cells]))
    grid = num.array(grid)

    return {'xllcorner': xllcorner,
            'yllcorner': yllcorner,
            'cellsize': cellsize,
            'NODATA_value': NODATA_value}, grid
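

if __name__ == '__main__':
    # A minimal sketch exercising _read_asc: write a tiny 2x3 grid in the
    # header format documented above, then read it back.  The filename and
    # values are arbitrary examples.
    example = '\n'.join(['ncols         3',
                         'nrows         2',
                         'xllcorner     722000',
                         'yllcorner     5893000',
                         'cellsize      25',
                         'NODATA_value  -9999',
                         '138.4 137.4 136.5',
                         '135.6 134.7 133.8'])

    fid = open('example.asc', 'w')
    fid.write(example + '\n')
    fid.close()

    metadata, grid = _read_asc('example.asc', verbose=True)
    assert metadata['cellsize'] == 25.0
    assert grid.shape == (2, 3)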