source: trunk/anuga_core/source/anuga/file_conversion/esri2sww.py @ 7758

Last change on this file since 7758 was 7758, checked in by hudson, 13 years ago

Added extra file conversion classes to file_conversion.

File size: 11.8 KB
Line 
1
2##
3# @brief Convert CSIRO ESRI file to an SWW boundary file.
4# @param bath_dir
5# @param elevation_dir
6# @param ucur_dir
7# @param vcur_dir
8# @param sww_file
9# @param minlat
10# @param maxlat
11# @param minlon
12# @param maxlon
13# @param zscale
14# @param mean_stage
15# @param fail_on_NaN
16# @param elevation_NaN_filler
17# @param bath_prefix
18# @param elevation_prefix
19# @param verbose
20# @note Also convert latitude and longitude to UTM. All coordinates are
21#       assumed to be given in the GDA94 datum.
def esri2sww(bath_dir,
                  elevation_dir,
                  ucur_dir,
                  vcur_dir,
                  sww_file,
                  minlat=None, maxlat=None,
                  minlon=None, maxlon=None,
                  zscale=1,
                  mean_stage=0,
                  fail_on_NaN=True,
                  elevation_NaN_filler=0,
                  bath_prefix='ba',
                  elevation_prefix='el',
                  verbose=False):
    """
    Produce an sww boundary file, from esri ascii data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    bath_dir              directory holding exactly the bathymetry file(s);
                          only the first directory entry is read
    elevation_dir         directory of per-timestep elevation (stage) grids
    ucur_dir              directory of per-timestep u-velocity grids
    vcur_dir              directory of per-timestep v-velocity grids
    sww_file              path of the NetCDF .sww file to create
    minlat, maxlat,
    minlon, maxlon        optional lat/lon clipping window (None = no clip)
    zscale                multiplier applied to elevation before adding
                          mean_stage to form stage
    mean_stage            constant added to scaled elevation to form stage
    fail_on_NaN           if True, raise DataMissingValuesError when an
                          elevation grid contains its NODATA_value
    elevation_NaN_filler  value substituted for NODATA cells when
                          fail_on_NaN is False
    bath_prefix           filename prefix of bathymetry files
                          (NOTE(review): accepted but never used below)
    elevation_prefix      filename prefix of elevation files
    verbose               emit progress/statistics via log.critical

    assume:
    All files are in esri ascii format

    4 types of information
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions
    The metadata of all the files is the same
    Each type is in a separate directory
    One bath file with extension .000
    The time period is less than 24hrs and uniform.
    """

    from Scientific.IO.NetCDF import NetCDFFile

    from anuga.coordinate_transforms.redfearn import redfearn

    # So if we want to change the precision it's done here
    precision = netcdf_float

    # go in to the bath dir and load the only file,
    # assumes the directory contains exactly the bathymetry file(s);
    # only the first listing entry is used.
    bath_files = os.listdir(bath_dir)
    bath_file = bath_files[0]
    bath_dir_file =  bath_dir + os.sep + bath_file
    bath_metadata, bath_grid =  _read_asc(bath_dir_file)

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    # (last 12 characters of the bathymetry filename, e.g. a timestamp suffix)
    base_start = bath_file[-12:]

    #go into the elevation dir and load the 000 file
    # NOTE(review): elevation_dir_file is computed but never used below.
    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
                         + base_start

    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)
    elevation_files.sort()

    # the first elevation file should be the
    # file with the same base name as the bath data
    # NOTE(review): hard-codes 'el' instead of using elevation_prefix.
    assert elevation_files[0] == 'el' + base_start

    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    # two triangles per rectangular grid cell
    number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2

    # grid coordinates derived from the lower-left corner and cell size
    longitudes = [bath_metadata['xllcorner'] + x*bath_metadata['cellsize'] \
                  for x in range(number_of_longitudes)]
    latitudes = [bath_metadata['yllcorner'] + y*bath_metadata['cellsize'] \
                 for y in range(number_of_latitudes)]

    # reverse order of lat, so the first lat represents the first grid row
    latitudes.reverse()

    # clip to the requested lat/lon window (copies passed so the helper
    # cannot mutate our lists)
    kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:],longitudes[:],
                                                  minlat=minlat, maxlat=maxlat,
                                                  minlon=minlon, maxlon=maxlon)

    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes * number_of_longitudes
    number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2

    #Work out the times
    if len(elevation_files) > 1:
        # Assume: The time period is less than 24hrs.
        # The last 3 filename characters are taken to be the hour stamp,
        # so the step between the first two files (in hours) * 3600
        # gives a uniform timestep in seconds.
        time_period = (int(elevation_files[1][-3:]) \
                       - int(elevation_files[0][-3:])) * 60*60
        times = [x*time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]

    if verbose:
        log.critical('------------------------------------------------')
        log.critical('Statistics:')
        log.critical('  Extent (lat/lon):')
        log.critical('    lat in [%f, %f], len(lat) == %d'
                     % (min(latitudes), max(latitudes), len(latitudes)))
        log.critical('    lon in [%f, %f], len(lon) == %d'
                     % (min(longitudes), max(longitudes), len(longitudes)))
        log.critical('    t in [%f, %f], len(t) == %d'
                     % (min(times), max(times), len(times)))

    ######### WRITE THE SWW FILE #############

    # NetCDF file definition
    outfile = NetCDFFile(sww_file, netcdf_mode_w)

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'

    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    #Start time in seconds since the epoch (midnight 1/1/1970)
    outfile.starttime = starttime = times[0]

    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)
    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    #outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',
                                                   'number_of_vertices'))

    outfile.createVariable('time', precision, ('number_of_timesteps',))

    outfile.createVariable('stage', precision, ('number_of_timesteps',
                                                'number_of_points'))

    outfile.createVariable('xmomentum', precision, ('number_of_timesteps',
                                                    'number_of_points'))

    outfile.createVariable('ymomentum', precision, ('number_of_timesteps',
                                                    'number_of_points'))

    #Store
    # NOTE(review): duplicate import -- redfearn was already imported above.
    from anuga.coordinate_transforms.redfearn import redfearn

    x = num.zeros(number_of_points, num.float)  #Easting
    y = num.zeros(number_of_points, num.float)  #Northing

    if verbose: log.critical('Making triangular grid')

    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    # Map (lon-index, lat-index) -> flat point index, converting each
    # lat/lon pair to UTM easting/northing on the way.
    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):
            vertices[l,k] = i

            zone, easting, northing = redfearn(lat, lon)

            #msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1

    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            #Note, this is different to the ferrit2sww code
            #since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) #Upper element
            volumes.append([v4,v2,v3]) #Lower element

    volumes = num.array(volumes, num.int)      #array default#

    # Georeference anchored at the minimum easting/northing; point
    # coordinates are stored relative to this origin below.
    geo_ref = Geo_reference(refzone, min(x), min(y))
    geo_ref.write_NetCDF(outfile)

    # This will put the geo ref in the middle
    #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2., (max(x)+min(y))/2.)

    if verbose:
        log.critical('------------------------------------------------')
        log.critical('More Statistics:')
        log.critical('  Extent (/lon):')
        log.critical('    x in [%f, %f], len(lat) == %d'
                     % (min(x), max(x), len(x)))
        log.critical('    y in [%f, %f], len(lon) == %d'
                     % (min(y), max(y), len(y)))
        log.critical('geo_ref: ', geo_ref)

    # Flatten the bathymetry grid into the 1-D elevation variable.
    z = num.resize(bath_grid,outfile.variables['elevation'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    # FIXME (Ole): Remove once viewer has been recompiled and changed
    #              to use elevation instead of z
    #outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z
    outfile.variables['volumes'][:] = volumes.astype(num.int32) # On Opteron 64

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: log.critical('Converting quantities')

    # One iteration per timestep: read the elevation/u/v grids, clip them
    # to the same window as the bathymetry, then write stage and momenta.
    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
            _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])

        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]

        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if num.sometrue (missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values' \
                      % (elevation_files[j])
                raise DataMissingValuesError, msg
            else:
                # replace NODATA cells with the filler value, keep the rest
                elevation_grid = elevation_grid*(missing==0) \
                                 + missing*elevation_NaN_filler

        if verbose and j % ((n+10)/10) == 0: log.critical('  Doing %d of %d'
                                                          % (j, n))

        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                # stage = scaled elevation + mean stage; momentum = velocity
                # * water depth (h = stage - bed elevation)
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1

    outfile.close()
292
293
294
295def _read_asc(filename, verbose=False):
296    """Read esri file from the following ASCII format (.asc)
297
298    Example:
299
300    ncols         3121
301    nrows         1800
302    xllcorner     722000
303    yllcorner     5893000
304    cellsize      25
305    NODATA_value  -9999
306    138.3698 137.4194 136.5062 135.5558 ..........
307
308    filename Path to the file to read.
309    verbose True if this function is to be verbose.
310    """
311
312    datafile = open(filename)
313
314    if verbose: log.critical('Reading DEM from %s' % filename)
315
316    lines = datafile.readlines()
317    datafile.close()
318
319    if verbose: log.critical('Got %d lines' % len(lines))
320
321    ncols = int(lines.pop(0).split()[1].strip())
322    nrows = int(lines.pop(0).split()[1].strip())
323    xllcorner = float(lines.pop(0).split()[1].strip())
324    yllcorner = float(lines.pop(0).split()[1].strip())
325    cellsize = float(lines.pop(0).split()[1].strip())
326    NODATA_value = float(lines.pop(0).split()[1].strip())
327
328    assert len(lines) == nrows
329
330    #Store data
331    grid = []
332
333    n = len(lines)
334    for i, line in enumerate(lines):
335        cells = line.split()
336        assert len(cells) == ncols
337        grid.append(num.array([float(x) for x in cells]))
338    grid = num.array(grid)
339
340    return {'xllcorner':xllcorner,
341            'yllcorner':yllcorner,
342            'cellsize':cellsize,
343            'NODATA_value':NODATA_value}, grid
Note: See TracBrowser for help on using the repository browser.