"""
Merge a list of .sww files together into a single file.
"""

import numpy as num
from anuga.utilities.numerical_tools import ensure_numeric

from Scientific.IO.NetCDF import NetCDFFile
from anuga.config import netcdf_mode_r, netcdf_mode_w, \
                            netcdf_mode_a, netcdf_float
from anuga.file.sww import SWW_file, Write_sww
---|
def sww_merge(swwfiles, output, verbose = False):
    """
    Merge a list of sww files into a single file.

    May be useful for parallel runs. Note that colinear points and
    edges are not merged: there will essentially be multiple meshes within
    the one sww file.

    The sww files to be merged must have exactly the same timesteps. Note
    that some advanced information and custom quantities may not be
    exported.

    swwfiles is a list of .sww files to merge.
    output is the output filename, including .sww extension.
    verbose True to log output information

    Raises ValueError if swwfiles is empty (previously this failed later
    with a confusing NameError).
    """

    if not swwfiles:
        raise ValueError('sww_merge: no input sww files supplied')

    static_quantities = ['elevation']
    dynamic_quantities = ['stage', 'xmomentum', 'ymomentum']

    first_file = True
    tri_offset = 0      # running total of points seen so far; used to re-index triangles
    for filename in swwfiles:
        if verbose:
            # Single-string print: valid in both Python 2 and 3.
            print('Reading file %s:' % filename)

        fid = NetCDFFile(filename, netcdf_mode_r)
        try:
            tris = fid.variables['volumes'][:]

            if first_file:
                # First file establishes the time axis and the output
                # accumulators; subsequent files must share the same timesteps.
                times = fid.variables['time'][:]
                x = []
                y = []
                out_tris = list(tris)
                out_s_quantities = {}
                out_d_quantities = {}

                for quantity in static_quantities:
                    out_s_quantities[quantity] = []

                # Quantities are stored as a 2D array of timesteps x data.
                for quantity in dynamic_quantities:
                    out_d_quantities[quantity] = [[] for _ in range(len(times))]

                description = 'merged:' + getattr(fid, 'description')
                first_file = False
            else:
                for tri in tris:
                    # Advance new tri indices to point at newly appended points.
                    verts = [vertex + tri_offset for vertex in tri]
                    out_tris.append(verts)

            num_pts = fid.dimensions['number_of_points']
            tri_offset += num_pts

            if verbose:
                print('  new triangle index offset is %d' % tri_offset)

            x.extend(list(fid.variables['x'][:]))
            y.extend(list(fid.variables['y'][:]))

            # Grow the list of static quantities associated with the x,y points
            for quantity in static_quantities:
                out_s_quantities[quantity].extend(fid.variables[quantity][:])

            # Collate all dynamic quantities according to their timestep
            for quantity in dynamic_quantities:
                time_chunks = fid.variables[quantity][:]
                for i, time_chunk in enumerate(time_chunks):
                    out_d_quantities[quantity][i].extend(time_chunk)
        finally:
            # Close each input even if reading it fails part-way through,
            # so a bad file does not leak an open NetCDF handle.
            fid.close()

    # Mash all points into a single big list
    points = [[xx, yy] for xx, yy in zip(x, y)]

    # Write out the merged SWW file: header, triangulation, static
    # quantities, then one slab per timestep for each dynamic quantity.
    fido = NetCDFFile(output, netcdf_mode_w)
    sww = Write_sww(static_quantities, dynamic_quantities)
    sww.store_header(fido, times,
                     len(out_tris),
                     len(points),
                     description=description,
                     sww_precision=netcdf_float)

    sww.store_triangulation(fido, points, out_tris)

    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)

    # Write out all the dynamic quantities for each timestep
    for q in dynamic_quantities:
        q_values = out_d_quantities[q]
        for i, time_slice in enumerate(q_values):
            fido.variables[q][i] = num.array(time_slice, netcdf_float)

        # This updates the _range values: widen the stored [min, max]
        # to cover the merged data if necessary.
        q_range = fido.variables[q + Write_sww.RANGE][:]
        q_values_min = num.min(q_values)
        if q_values_min < q_range[0]:
            fido.variables[q + Write_sww.RANGE][0] = q_values_min
        q_values_max = num.max(q_values)
        if q_values_max > q_range[1]:
            fido.variables[q + Write_sww.RANGE][1] = q_values_max

    fido.close()
---|
120 | |
---|
121 | |
---|
if __name__ == "__main__":

    import argparse
    from anuga.anuga_exceptions import ANUGAError

    # Command line interface for merging the per-processor sww files
    # produced by a parallel ANUGA run.
    parser = argparse.ArgumentParser(description='Merge sww files created from parallel run')
    parser.add_argument('-np', type=int, default=4,
                        help='number of processors used to produce sww files')
    parser.add_argument('-f', type=str, default="domain",
                        help='base sww file name')
    parser.add_argument('-v', nargs='?', type=bool, const=True, default=False,
                        help='verbosity')

    args = parser.parse_args()

    np = args.np
    filebase = args.f
    verbose = args.v

    # Parallel runs name their pieces <base>_P<rank>_<nprocs>.sww;
    # the merged result is written to <base>.sww.
    output = filebase + ".sww"
    swwfiles = [filebase + "_P" + str(v) + "_" + str(np) + ".sww" for v in range(np)]

    try:
        sww_merge(swwfiles, output, verbose)
    except Exception:
        # Report which files were being merged, then let the original
        # traceback propagate.
        msg = 'ERROR: When merging sww files ' + " ".join(swwfiles)
        print(msg)
        raise
---|