1 | """ |
---|
2 | Merge a list of .sww files together into a single file. |
---|
3 | """ |
---|
4 | |
---|
5 | import numpy as num |
---|
6 | from anuga.utilities.numerical_tools import ensure_numeric |
---|
7 | |
---|
8 | from Scientific.IO.NetCDF import NetCDFFile |
---|
9 | from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a |
---|
10 | from anuga.config import netcdf_float, netcdf_float32, netcdf_int |
---|
11 | from anuga.file.sww import SWW_file, Write_sww |
---|
12 | |
---|
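# Example usage (an illustrative sketch: assumes a 4-processor run of a
# domain named 'domain' produced domain_P4_0.sww ... domain_P4_3.sww in the
# current directory):
#
#     sww_merge_parallel('domain', np=4, verbose=True)
#
# which writes the combined results to domain.sww.
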
def sww_merge(domain_global_name, np, verbose=False):
    """Merge the sww files produced by an np-processor run of
    domain_global_name into a single file <domain_global_name>.sww."""

    output = domain_global_name + ".sww"
    swwfiles = [domain_global_name + "_P" + str(np) + "_" + str(v) + ".sww" for v in range(np)]

    _sww_merge(swwfiles, output, verbose)


def sww_merge_parallel(domain_global_name, np, verbose=False, delete_old=False):
    """Merge the sww files produced by an np-processor run of
    domain_global_name into a single file <domain_global_name>.sww,
    optionally deleting the per-processor files afterwards."""

    output = domain_global_name + ".sww"
    swwfiles = [domain_global_name + "_P" + str(np) + "_" + str(v) + ".sww" for v in range(np)]

    _sww_merge_parallel(swwfiles, output, verbose, delete_old)


def _sww_merge(swwfiles, output, verbose=False):
    """
    Merge a list of sww files into a single file.

    May be useful for parallel runs. Note that points and edges shared
    between the input meshes are not merged: there will essentially be
    multiple meshes within the one sww file.

    The sww files to be merged must have exactly the same timesteps. Note
    that some advanced information and custom quantities may not be
    exported.

    swwfiles is a list of .sww files to merge.
    output is the output filename, including .sww extension.
    verbose True to log output information
    """
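
    # Sketch of the approach: the input meshes are simply concatenated.
    # Each file's points are appended to one big point list, and its
    # triangles are re-indexed by the number of points read so far; e.g.
    # if the first file contributed 100 points, a triangle [0, 1, 2] from
    # the second file is stored as [100, 101, 102].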

    if verbose:
        print "MERGING SWW Files"

    static_quantities = ['elevation']
    dynamic_quantities = ['stage', 'xmomentum', 'ymomentum']

    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print 'Reading file ', filename, ':'

        fid = NetCDFFile(filename, netcdf_mode_r)

        tris = fid.variables['volumes'][:]

        if first_file:
            times = fid.variables['time'][:]
            x = []
            y = []
            out_tris = list(tris)
            out_s_quantities = {}
            out_d_quantities = {}

            order          = fid.order
            xllcorner      = fid.xllcorner
            yllcorner      = fid.yllcorner
            zone           = fid.zone
            false_easting  = fid.false_easting
            false_northing = fid.false_northing
            datum          = fid.datum
            projection     = fid.projection

            for quantity in static_quantities:
                out_s_quantities[quantity] = []

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_quantities:
                out_d_quantities[quantity] = [[] for _ in range(len(times))]
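            # e.g. out_d_quantities['stage'][i] will accumulate the stage
            # value of every point (from all input files) at timestep i.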

            description = 'merged:' + getattr(fid, 'description')
            first_file = False
        else:
            for tri in tris:
                # Advance new tri indices to point at newly appended points.
                verts = [vertex + tri_offset for vertex in tri]
                out_tris.append(verts)

        num_pts = fid.dimensions['number_of_points']
        tri_offset += num_pts

        if verbose:
            print ' new triangle index offset is ', tri_offset

        x.extend(list(fid.variables['x'][:]))
        y.extend(list(fid.variables['y'][:]))

        # Grow the list of static quantities associated with the x,y points
        for quantity in static_quantities:
            out_s_quantities[quantity].extend(fid.variables[quantity][:])

        # Collate all dynamic quantities according to their timestep
        for quantity in dynamic_quantities:
            time_chunks = fid.variables[quantity][:]
            for i, time_chunk in enumerate(time_chunks):
                out_d_quantities[quantity][i].extend(time_chunk)

        # Combine all points into a single list of [x, y] pairs
        points = [[xx, yy] for xx, yy in zip(x, y)]

        points = num.asarray(points).astype(netcdf_float32)

        fid.close()

    #---------------------------
    # Write out the SWW file
    #---------------------------

    if verbose:
        print 'Writing file ', output, ':'
    fido = NetCDFFile(output, netcdf_mode_w)
    sww = Write_sww(static_quantities, dynamic_quantities)
    sww.store_header(fido, times,
                     len(out_tris),
                     len(points),
                     description=description,
                     sww_precision=netcdf_float32)

    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()

    sww.store_triangulation(fido, points, out_tris, points_georeference=geo_reference)

    fido.order          = order
    fido.xllcorner      = xllcorner
    fido.yllcorner      = yllcorner
    fido.zone           = zone
    fido.false_easting  = false_easting
    fido.false_northing = false_northing
    fido.datum          = datum
    fido.projection     = projection

    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)

    # Write out all the dynamic quantities for each timestep
    for q in dynamic_quantities:
        q_values = out_d_quantities[q]
        for i, time_slice in enumerate(q_values):
            fido.variables[q][i] = num.array(time_slice, netcdf_float32)

        # This updates the _range values
        q_range = fido.variables[q + Write_sww.RANGE][:]
        q_values_min = num.min(q_values)
        if q_values_min < q_range[0]:
            fido.variables[q + Write_sww.RANGE][0] = q_values_min
        q_values_max = num.max(q_values)
        if q_values_max > q_range[1]:
            fido.variables[q + Write_sww.RANGE][1] = q_values_max
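
        # The update above uses q + Write_sww.RANGE to address the companion
        # range variable of each quantity (presumably e.g. 'stage_range'),
        # which keeps a running [min, max] over all timesteps.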


    fido.close()


def _sww_merge_parallel(swwfiles, output, verbose=False, delete_old=False):
    """
    Merge a list of sww files into a single file.

    Use to merge files created by parallel runs.

    The sww files to be merged must have exactly the same timesteps.

    Note that some advanced information and custom quantities may not be
    exported.

    swwfiles is a list of .sww files to merge.
    output is the output filename, including .sww extension.
    verbose True to log output information
    """
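
    # Sketch of the approach (as read from the code below): each
    # per-processor file stores local-to-global index maps ('tri_l2g',
    # 'node_l2g') and a 'tri_full_flag' marking the triangles owned by that
    # processor. Global arrays are allocated once, from the first file, and
    # each file's owned triangles, nodes and quantity values are then
    # scattered into their global positions.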

    if verbose:
        print "MERGING SWW Files"

    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print 'Reading file ', filename, ':'

        fid = NetCDFFile(filename, netcdf_mode_r)

        if first_file:

            times = fid.variables['time'][:]
            n_steps = len(times)
            number_of_timesteps = fid.dimensions['number_of_timesteps']
            #print n_steps, number_of_timesteps
            starttime = int(fid.starttime)

            out_s_quantities = {}
            out_d_quantities = {}

            number_of_global_triangles = int(fid.number_of_global_triangles)
            number_of_global_nodes     = int(fid.number_of_global_nodes)

            order          = fid.order
            xllcorner      = fid.xllcorner
            yllcorner      = fid.yllcorner
            zone           = fid.zone
            false_easting  = fid.false_easting
            false_northing = fid.false_northing
            datum          = fid.datum
            projection     = fid.projection

            g_volumes = num.zeros((number_of_global_triangles, 3), num.int)
            g_x = num.zeros((number_of_global_nodes,), num.float32)
            g_y = num.zeros((number_of_global_nodes,), num.float32)

            g_points = num.zeros((number_of_global_nodes, 2), num.float32)

            quantities = ['elevation', 'stage', 'xmomentum', 'ymomentum']
            static_quantities = []
            dynamic_quantities = []

            for quantity in quantities:
                # A quantity is dynamic if it has one value per timestep,
                # otherwise it is static (typically only elevation).
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_quantities.append(quantity)
                else:
                    static_quantities.append(quantity)

            for quantity in static_quantities:
                out_s_quantities[quantity] = num.zeros((number_of_global_nodes,), num.float32)

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_quantities:
                out_d_quantities[quantity] = \
                    num.zeros((n_steps, number_of_global_nodes), num.float32)

            description = 'merged:' + getattr(fid, 'description')
            first_file = False


        # Read in from files and add to global arrays

        tri_l2g  = fid.variables['tri_l2g'][:]
        node_l2g = fid.variables['node_l2g'][:]
        tri_full_flag = fid.variables['tri_full_flag'][:]
        volumes = num.array(fid.variables['volumes'][:], dtype=num.int)
        l_volumes = num.zeros_like(volumes)
        l_old_volumes = num.zeros_like(volumes)

        # Change the local node ids to global ids in the
        # volume (triangle) array

        # FIXME SR: Surely we can knock up a numpy way of doing this
        #for i in range(len(l_volumes)):
        #    g_n0 = node_l2g[volumes[i,0]]
        #    g_n1 = node_l2g[volumes[i,1]]
        #    g_n2 = node_l2g[volumes[i,2]]
        #
        #    l_old_volumes[i,:] = [g_n0, g_n1, g_n2]

        g_n0 = node_l2g[volumes[:,0]].reshape(-1,1)
        g_n1 = node_l2g[volumes[:,1]].reshape(-1,1)
        g_n2 = node_l2g[volumes[:,2]].reshape(-1,1)

        #print g_n0.shape
        l_volumes = num.hstack((g_n0, g_n1, g_n2))

        #assert num.allclose(l_volumes, l_old_volumes)

        # Just pick out the full triangles
        ftri_l2g = num.compress(tri_full_flag, tri_l2g)

        #print l_volumes
        #print tri_full_flag
        #print tri_l2g
        #print ftri_l2g

        fg_volumes = num.compress(tri_full_flag, l_volumes, axis=0)
        g_volumes[ftri_l2g] = fg_volumes

        #g_x[node_l2g] = fid.variables['x']
        #g_y[node_l2g] = fid.variables['y']

        g_points[node_l2g, 0] = fid.variables['x']
        g_points[node_l2g, 1] = fid.variables['y']

        #print number_of_timesteps

        # FIXME SR: It seems that some of the "ghost" node quantity values
        # are being stored. We should only store those nodes which are
        # associated with full triangles. So we need an index array of
        # "full" nodes, ie those in full triangles.

        # Use numpy.compress and numpy.unique to get the "full" nodes
        f_volumes = num.compress(tri_full_flag, volumes, axis=0)
        fl_nodes = num.unique(f_volumes)
        f_node_l2g = node_l2g[fl_nodes]

        #print len(node_l2g)
        #print len(fl_nodes)

        # Read in static quantities
        for quantity in static_quantities:
            #out_s_quantities[quantity][node_l2g] = \
            #    num.array(fid.variables[quantity], dtype=num.float32)
            q = fid.variables[quantity]
            #print quantity, q.shape
            out_s_quantities[quantity][f_node_l2g] = \
                num.array(q, dtype=num.float32)[fl_nodes]

        # Collate all dynamic quantities according to their timestep
        for quantity in dynamic_quantities:
            q = fid.variables[quantity]
            #print q.shape
            for i in range(n_steps):
                #out_d_quantities[quantity][i][node_l2g] = \
                #    num.array(q[i], dtype=num.float32)
                out_d_quantities[quantity][i][f_node_l2g] = \
                    num.array(q[i], dtype=num.float32)[fl_nodes]

        fid.close()

    #---------------------------
    # Write out the SWW file
    #---------------------------
    #print g_points.shape

    #print number_of_global_triangles
    #print number_of_global_nodes

    if verbose:
        print 'Writing file ', output, ':'
    fido = NetCDFFile(output, netcdf_mode_w)
    sww = Write_sww(static_quantities, dynamic_quantities)
    sww.store_header(fido, starttime,
                     number_of_global_triangles,
                     number_of_global_nodes,
                     description=description,
                     sww_precision=netcdf_float32)

    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()

    sww.store_triangulation(fido, g_points, g_volumes, points_georeference=geo_reference)

    fido.order          = order
    fido.xllcorner      = xllcorner
    fido.yllcorner      = yllcorner
    fido.zone           = zone
    fido.false_easting  = false_easting
    fido.false_northing = false_northing
    fido.datum          = datum
    fido.projection     = projection

    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)

    # Write out all the dynamic quantities for each timestep

    for i in range(n_steps):
        fido.variables['time'][i] = times[i]

    for q in dynamic_quantities:
        q_values = out_d_quantities[q]
        for i in range(n_steps):
            fido.variables[q][i] = q_values[i]

        # This updates the _range values
        q_range = fido.variables[q + Write_sww.RANGE][:]
        q_values_min = num.min(q_values)
        if q_values_min < q_range[0]:
            fido.variables[q + Write_sww.RANGE][0] = q_values_min
        q_values_max = num.max(q_values)
        if q_values_max > q_range[1]:
            fido.variables[q + Write_sww.RANGE][1] = q_values_max

    #print out_s_quantities
    #print out_d_quantities

    #print g_x
    #print g_y

    #print g_volumes

    fido.close()

    if delete_old:
        import os
        for filename in swwfiles:

            if verbose:
                print 'Deleting file ', filename, ':'
            os.remove(filename)

if __name__ == "__main__":

    import argparse
    from anuga.anuga_exceptions import ANUGAError

    parser = argparse.ArgumentParser(description='Merge sww files created from a parallel run')
    parser.add_argument('-np', type=int, default=4,
                        help='number of processors used to produce the sww files')
    parser.add_argument('-f', type=str, default="domain",
                        help='domain global name')
    parser.add_argument('-v', nargs='?', type=bool, const=True, default=False,
                        help='verbosity')
    parser.add_argument('-delete_old', nargs='?', type=bool, const=True, default=False,
                        help='flag to delete the input files')
    args = parser.parse_args()

    np = args.np
    domain_global_name = args.f
    verbose = args.v
    delete_old = args.delete_old

    try:
        sww_merge_parallel(domain_global_name, np, verbose, delete_old)
    except:
        msg = 'ERROR: Failed to merge the sww files for %s' % domain_global_name
        print msg
        raise
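
# Example command line (illustrative: assumes this module is saved as
# sww_merge.py and a 4-processor run of the domain 'domain' produced
# domain_P4_0.sww ... domain_P4_3.sww):
#
#     python sww_merge.py -f domain -np 4 -v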