1 | """ |
---|
2 | Convert a ferret file to an SWW file. |
---|
3 | """ |
---|
4 | # external modules |
---|
5 | import numpy as num |
---|
6 | |
---|
7 | |
---|
8 | # ANUGA modules |
---|
9 | from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_float |
---|
10 | from anuga.file.sww import Write_sww |
---|
11 | from anuga.coordinate_transforms.geo_reference import Geo_reference, \ |
---|
12 | write_NetCDF_georeference |
---|
13 | import anuga.utilities.log as log |
---|
14 | |
---|
15 | #local modules |
---|
16 | from anuga.file_conversion.file_conversion import get_min_max_indices |
---|
17 | |
---|
18 | |
---|
def ferret2sww(basename_in, basename_out=None,
               verbose=False,
               minlat=None, maxlat=None,
               minlon=None, maxlon=None,
               mint=None, maxt=None, mean_stage=0,
               origin=None, zscale=1,
               fail_on_NaN=True,
               NaN_filler=0,
               elevation=None,
               inverted_bathymetry=True
               ): #FIXME: Bathymetry should be obtained
                  #from MOST somehow.
                  #Alternatively from elsewhere
                  #or, as a last resort,
                  #specified here.
                  #The value of -100 will work
                  #for the Wollongong tsunami
                  #scenario but is very hacky
37 | """Convert MOST and 'Ferret' NetCDF format for wave propagation to |
---|
38 | sww format native to abstract_2d_finite_volumes. |
---|
39 | |
---|
40 | Specify only basename_in and read files of the form |
---|
41 | basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing |
---|
42 | relative height, x-velocity and y-velocity, respectively. |
---|
43 | |
---|
44 | Also convert latitude and longitude to UTM. All coordinates are |
---|
45 | assumed to be given in the GDA94 datum. |
---|
46 | |
---|
47 | min's and max's: If omitted - full extend is used. |
---|
48 | To include a value min may equal it, while max must exceed it. |
---|
49 | Lat and lon are assuemd to be in decimal degrees |
---|
50 | |
---|
51 | origin is a 3-tuple with geo referenced |
---|
52 | UTM coordinates (zone, easting, northing) |
---|
53 | |
---|
54 | nc format has values organised as HA[TIME, LATITUDE, LONGITUDE] |
---|
55 | which means that longitude is the fastest |
---|
56 | varying dimension (row major order, so to speak) |
---|
57 | |
---|
58 | ferret2sww uses grid points as vertices in a triangular grid |
---|
59 | counting vertices from lower left corner upwards, then right |
---|
60 | """ |
---|
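    # Illustrative usage (a sketch only; the file prefix and bounding box
    # below are hypothetical):
    #
    #     ferret2sww('wollongong', basename_out='wollongong_source',
    #                minlat=-35.0, maxlat=-34.0,
    #                minlon=150.0, maxlon=151.5,
    #                mean_stage=0.0, zscale=1,
    #                fail_on_NaN=False, NaN_filler=0)
    #
    # This expects wollongong_ha.nc, wollongong_ua.nc, wollongong_va.nc and
    # wollongong_e.nc in the working directory and writes wollongong_source.sww.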

    from Scientific.IO.NetCDF import NetCDFFile

    _assert_lat_long(minlat, maxlat, minlon, maxlon)

    # Get NetCDF data
    if verbose: log.critical('Reading files %s_*.nc' % basename_in)

    # Wave amplitude (cm)
    file_h = NetCDFFile(basename_in + '_ha.nc', netcdf_mode_r)

    # Velocity (x) (cm/s)
    file_u = NetCDFFile(basename_in + '_ua.nc', netcdf_mode_r)

    # Velocity (y) (cm/s)
    file_v = NetCDFFile(basename_in + '_va.nc', netcdf_mode_r)

    # Elevation (z) (m)
    file_e = NetCDFFile(basename_in + '_e.nc', netcdf_mode_r)

    if basename_out is None:
        swwname = basename_in + '.sww'
    else:
        swwname = basename_out + '.sww'

    # Get dimensions of file_h
    for dimension in file_h.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_h_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_h_latitude = dimension
        if dimension[:4] == 'TIME':
            dim_h_time = dimension

    times = file_h.variables[dim_h_time]
    latitudes = file_h.variables[dim_h_latitude]
    longitudes = file_h.variables[dim_h_longitude]

    kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:],
                                                 longitudes[:],
                                                 minlat, maxlat,
                                                 minlon, maxlon)
    # get dimensions for file_e
    for dimension in file_e.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_e_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_e_latitude = dimension

    # get dimensions for file_u
    for dimension in file_u.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_u_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_u_latitude = dimension

    # get dimensions for file_v
    for dimension in file_v.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_v_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_v_latitude = dimension

    # Precision used by MOST for lat/lon is 4 or 5 decimals
    e_lat = num.around(file_e.variables[dim_e_latitude][:], 5)
    e_lon = num.around(file_e.variables[dim_e_longitude][:], 5)

    # Check that files are compatible
    assert num.allclose(latitudes, file_u.variables[dim_u_latitude])
    assert num.allclose(latitudes, file_v.variables[dim_v_latitude])
    assert num.allclose(latitudes, e_lat)

    assert num.allclose(longitudes, file_u.variables[dim_u_longitude])
    assert num.allclose(longitudes, file_v.variables[dim_v_longitude])
    assert num.allclose(longitudes, e_lon)

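    # jmin:jmax bracket the requested time window.  num.searchsorted returns
    # the first index whose time is >= the requested value, so a time equal
    # to mint is included while a time equal to maxt is excluded (matching
    # the min/max convention in the docstring).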
    if mint is None:
        jmin = 0
        mint = times[0]
    else:
        jmin = num.searchsorted(times, mint)

    # numpy.int32 didn't work in slicing of amplitude below
    jmin = int(jmin)

    if maxt is None:
        jmax = len(times)
        maxt = times[-1]
    else:
        jmax = num.searchsorted(times, maxt)

    # numpy.int32 didn't work in slicing of amplitude below
    jmax = int(jmax)

    kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:],
                                                 longitudes[:],
                                                 minlat, maxlat,
                                                 minlon, maxlon)


    times = times[jmin:jmax]
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]

    if verbose: log.critical('cropping')

    zname = 'ELEVATION'

    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax]  #Lon
    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax]  #Lat
    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]

    # Get missing values
    nan_ha = file_h.variables['HA'].missing_value[0]
    nan_ua = file_u.variables['UA'].missing_value[0]
    nan_va = file_v.variables['VA'].missing_value[0]
    if hasattr(file_e.variables[zname], 'missing_value'):
        nan_e = file_e.variables[zname].missing_value[0]
    else:
        nan_e = None

    # Cleanup
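    # Where an input grid contains its missing_value sentinel, either abort
    # (fail_on_NaN) or overwrite those cells with NaN_filler.  The expression
    # field*(missing==0) + missing*NaN_filler zeroes the flagged cells and
    # then adds the filler value, leaving all other cells untouched.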
    missing = (amplitudes == nan_ha)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values' \
                  % (basename_in + '_ha.nc')
            raise DataMissingValuesError(msg)
        else:
            amplitudes = amplitudes*(missing==0) + missing*NaN_filler

    missing = (uspeed == nan_ua)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values' \
                  % (basename_in + '_ua.nc')
            raise DataMissingValuesError(msg)
        else:
            uspeed = uspeed*(missing==0) + missing*NaN_filler

    missing = (vspeed == nan_va)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values' \
                  % (basename_in + '_va.nc')
            raise DataMissingValuesError(msg)
        else:
            vspeed = vspeed*(missing==0) + missing*NaN_filler

    missing = (elevations == nan_e)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values' \
                  % (basename_in + '_e.nc')
            raise DataMissingValuesError(msg)
        else:
            elevations = elevations*(missing==0) + missing*NaN_filler

    number_of_times = times.shape[0]
    number_of_latitudes = latitudes.shape[0]
    number_of_longitudes = longitudes.shape[0]

    assert amplitudes.shape[0] == number_of_times
    assert amplitudes.shape[1] == number_of_latitudes
    assert amplitudes.shape[2] == number_of_longitudes

    if verbose:
        _show_stats((latitudes, longitudes), times, amplitudes,
                    (uspeed, vspeed), elevations)

    # print number_of_latitudes, number_of_longitudes
    number_of_points = number_of_latitudes * number_of_longitudes
    number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2

    file_h.close()
    file_u.close()
    file_v.close()
    file_e.close()


    # NetCDF file definition
    outfile = NetCDFFile(swwname, netcdf_mode_w)

    description = 'Converted from Ferret files: %s, %s, %s, %s' \
                  % (basename_in + '_ha.nc',
                     basename_in + '_ua.nc',
                     basename_in + '_va.nc',
                     basename_in + '_e.nc')

    # Create new file
    starttime = times[0]

    sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
    sww.store_header(outfile, times, number_of_volumes,
                     number_of_points, description=description,
                     verbose=verbose, sww_precision=netcdf_float)

    # Store
    from anuga.coordinate_transforms.redfearn import redfearn
    x = num.zeros(number_of_points, num.float)  #Easting
    y = num.zeros(number_of_points, num.float)  #Northing

    if verbose:
        log.critical('Making triangular grid')

    # Check zone boundaries
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):        # Y direction
        for l, lon in enumerate(longitudes):   # X direction
            vertices[l, k] = i

            _, easting, northing = redfearn(lat, lon)

            #msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1

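    # Each 'rectangular' grid cell between columns l, l+1 and rows k, k+1 is
    # split along the v2-v3 diagonal:
    #
    #     v1 ---- v3        v1 = vertices[l,   k+1]    v3 = vertices[l+1, k+1]
    #      |    / |         v2 = vertices[l,   k]      v4 = vertices[l+1, k]
    #      |  /   |
    #     v2 ---- v4        upper triangle: [v1, v2, v3]
    #                       lower triangle: [v4, v3, v2]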
    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    # X direction
        for k in range(number_of_latitudes-1): # Y direction
            v1 = vertices[l, k+1]
            v2 = vertices[l, k]
            v3 = vertices[l+1, k+1]
            v4 = vertices[l+1, k]

            volumes.append([v1, v2, v3])    #Upper element
            volumes.append([v4, v3, v2])    #Lower element

    volumes = num.array(volumes, num.int)    #array default#

    if origin is None:
        origin = Geo_reference(refzone, min(x), min(y))
    geo_ref = write_NetCDF_georeference(origin, outfile)

    if elevation is not None:
        z = elevation
    else:
        if inverted_bathymetry:
            z = -1 * elevations
        else:
            z = elevations
        #FIXME: z should be obtained from MOST and passed in here

    #FIXME use the Write_sww instance(sww) to write this info
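    # x and y are written as offsets from the georeference origin
    # (xllcorner/yllcorner) stored above by write_NetCDF_georeference, so the
    # coordinates in the SWW file are relative to that lower-left corner.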
    z = num.resize(z, outfile.variables['elevation'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    #outfile.variables['z'][:] = z    #FIXME HACK for backwards compat.
    outfile.variables['elevation'][:] = z
    outfile.variables['volumes'][:] = volumes.astype(num.int32)    #For Opteron 64

    #Time stepping
    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    if verbose:
        log.critical('Converting quantities')

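    # MOST/Ferret amplitudes and velocities are in cm and cm/s; divide by 100
    # to get metres.  For each grid point:
    #     stage      w = zscale*ha/100 + mean_stage    [m]
    #     depth      h = w - z                         [m]
    #     xmomentum    = ua/100 * h                    [m^2/s]
    #     ymomentum    = va/100 * h                    [m^2/s]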
    n = len(times)
    for j in range(n):
        if verbose and j % ((n+10)/10) == 0:
            log.critical(' Doing %d of %d' % (j, n))

        i = 0
        for k in range(number_of_latitudes):      # Y direction
            for l in range(number_of_longitudes): # X direction
                w = zscale * amplitudes[j, k, l] / 100 + mean_stage
                stage[j, i] = w
                h = w - z[i]
                xmomentum[j, i] = uspeed[j, k, l]/100*h
                ymomentum[j, i] = vspeed[j, k, l]/100*h
                i += 1

    #outfile.close()

    #FIXME: Refactor using code from file_function.statistics
    #Something like print swwstats(swwname)
    if verbose:
        time_info = times, starttime, mint, maxt
        _show_sww_stats(outfile, swwname, geo_ref, time_info)

    outfile.close()


def _show_stats(latlong, times, amplitudes, speeds, elevations):
    """ Print the statistics nicely to the log file """

    latitudes, longitudes = latlong
    uspeed, vspeed = speeds

    log.critical('------------------------------------------------')
    log.critical('Statistics:')
    log.critical(' Extent (lat/lon):')
    log.critical(' lat in [%f, %f], len(lat) == %d'
                 % (num.min(latitudes), num.max(latitudes),
                    len(latitudes.flat)))
    log.critical(' lon in [%f, %f], len(lon) == %d'
                 % (num.min(longitudes), num.max(longitudes),
                    len(longitudes.flat)))
    log.critical(' t in [%f, %f], len(t) == %d'
                 % (num.min(times), num.max(times), len(times.flat)))

    name = 'Amplitudes (ha) [cm]'
    log.critical(' %s in [%f, %f]'
                 % (name, num.min(amplitudes), num.max(amplitudes)))

    name = 'Speeds (ua) [cm/s]'
    log.critical(' %s in [%f, %f]'
                 % (name, num.min(uspeed), num.max(uspeed)))

    name = 'Speeds (va) [cm/s]'
    log.critical(' %s in [%f, %f]'
                 % (name, num.min(vspeed), num.max(vspeed)))

    name = 'Elevations (e) [m]'
    log.critical(' %s in [%f, %f]'
                 % (name, num.min(elevations), num.max(elevations)))


def _show_sww_stats(outfile, swwname, geo_ref, time_info):
    """ Log SWW output stats. """
    times, starttime, mint, maxt = time_info
    x = outfile.variables['x'][:]
    y = outfile.variables['y'][:]
    log.critical('------------------------------------------------')
    log.critical('Statistics of output file:')
    log.critical(' Name: %s' % swwname)
    log.critical(' Reference:')
    log.critical(' Lower left corner: [%f, %f]'
                 % (geo_ref.get_xllcorner(), geo_ref.get_yllcorner()))
    log.critical(' Start time: %f' % starttime)
    log.critical(' Min time: %f' % mint)
    log.critical(' Max time: %f' % maxt)
    log.critical(' Extent:')
    log.critical(' x [m] in [%f, %f], len(x) == %d'
                 % (num.min(x), num.max(x), len(x.flat)))
    log.critical(' y [m] in [%f, %f], len(y) == %d'
                 % (num.min(y), num.max(y), len(y.flat)))
    log.critical(' t [s] in [%f, %f], len(t) == %d'
                 % (min(times), max(times), len(times)))
    log.critical(' Quantities [SI units]:')
    for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
        q = outfile.variables[name][:]    # .flatten()
        log.critical(' %s in [%f, %f]' % (name, num.min(q), num.max(q)))


def _assert_lat_long(minlat, maxlat, minlon, maxlon):
    """Check latitudes and longitudes for validity."""

    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'

    if minlat is not None:
        assert -90 < minlat < 90, msg
    if maxlat is not None:
        assert -90 < maxlat < 90, msg
    if minlat is not None and maxlat is not None:
        assert maxlat > minlat
    if minlon is not None:
        assert -180 < minlon < 180, msg
    if maxlon is not None:
        assert -180 < maxlon < 180, msg
    if minlon is not None and maxlon is not None:
        assert maxlon > minlon
---|