source: trunk/anuga_core/source/anuga/utilities/sww_merge.py @ 8292

Last change on this file since 8292 was 8292, checked in by steve, 12 years ago

Just added default values to sww_merge

[7820]1"""
2    Merge a list of .sww files together into a single file.
3"""
4
[7817]5import numpy as num
[7819]6from anuga.utilities.numerical_tools import ensure_numeric
[7817]7
8from Scientific.IO.NetCDF import NetCDFFile
[8277]9from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
10from anuga.config import netcdf_float, netcdf_float32, netcdf_int
[7817]11from anuga.file.sww import SWW_file, Write_sww
12
def sww_merge(domain_global_name, np, verbose=False):
    """Merge the np sww files <domain_global_name>_P<np>_<v>.sww
    (v = 0, ..., np-1) into a single file <domain_global_name>.sww."""

    output = domain_global_name + ".sww"
    swwfiles = [domain_global_name + "_P" + str(np) + "_" + str(v) + ".sww" for v in range(np)]

    _sww_merge(swwfiles, output, verbose)


def sww_merge_parallel(domain_global_name, np, verbose=False, delete_old=False):
    """As sww_merge, but for the sww files written by a parallel run.
    If delete_old is True the per-processor files are removed after merging."""

    output = domain_global_name + ".sww"
    swwfiles = [domain_global_name + "_P" + str(np) + "_" + str(v) + ".sww" for v in range(np)]

    _sww_merge_parallel(swwfiles, output, verbose, delete_old)

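# A minimal usage sketch (assuming a parallel run over 4 processors has
# written domain_P4_0.sww ... domain_P4_3.sww into the working directory):
#
#     from anuga.utilities.sww_merge import sww_merge_parallel
#     sww_merge_parallel('domain', np=4, verbose=True, delete_old=True)
#
# This merges the four per-processor files into domain.sww and then removes
# the partial files.
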
def _sww_merge(swwfiles, output, verbose=False):
    """
        Merge a list of sww files into a single file.

        May be useful for parallel runs. Note that collinear points and
        edges are not merged: there will essentially be multiple meshes within
        the one sww file.

        The sww files to be merged must have exactly the same timesteps. Note
        that some advanced information and custom quantities may not be
        exported.

        swwfiles is a list of .sww files to merge.
        output is the output filename, including the .sww extension.
        verbose is True to log progress information.
    """
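
    # For illustration: the per-file meshes are simply concatenated. If the
    # first file contributes, say, 4 points, then a triangle [0, 1, 2] read
    # from the second file is stored as [4, 5, 6], so that its vertices index
    # the second file's points after they have been appended to the merged
    # point list (tri_offset below tracks this running offset).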

    if verbose:
        print "MERGING SWW Files"

    static_quantities = ['elevation']
    dynamic_quantities = ['stage', 'xmomentum', 'ymomentum']

    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print 'Reading file ', filename, ':'

        fid = NetCDFFile(filename, netcdf_mode_r)

        tris = fid.variables['volumes'][:]

        if first_file:
            times = fid.variables['time'][:]
            x = []
            y = []
            out_tris = list(tris)
            out_s_quantities = {}
            out_d_quantities = {}

            order      = fid.order
            xllcorner  = fid.xllcorner
            yllcorner  = fid.yllcorner
            zone       = fid.zone
            false_easting  = fid.false_easting
            false_northing = fid.false_northing
            datum      = fid.datum
            projection = fid.projection

            for quantity in static_quantities:
                out_s_quantities[quantity] = []

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_quantities:
                out_d_quantities[quantity] = [ [] for _ in range(len(times))]

            description = 'merged:' + getattr(fid, 'description')
            first_file = False
        else:
            for tri in tris:
                # Advance new tri indices to point at newly appended points.
                verts = [vertex+tri_offset for vertex in tri]
                out_tris.append(verts)

        num_pts = fid.dimensions['number_of_points']
        tri_offset += num_pts

        if verbose:
            print '  new triangle index offset is ', tri_offset

        x.extend(list(fid.variables['x'][:]))
        y.extend(list(fid.variables['y'][:]))

        # Grow the list of static quantities associated with the x,y points
        for quantity in static_quantities:
            out_s_quantities[quantity].extend(fid.variables[quantity][:])

        # Collate all dynamic quantities according to their timestep
        for quantity in dynamic_quantities:
            time_chunks = fid.variables[quantity][:]
            for i, time_chunk in enumerate(time_chunks):
                out_d_quantities[quantity][i].extend(time_chunk)

        # Close each input file once its data has been copied out.
        fid.close()

    # Mash all points into a single big list
    points = [[xx, yy] for xx, yy in zip(x, y)]

    points = num.asarray(points).astype(netcdf_float32)

    #---------------------------
    # Write out the SWW file
    #---------------------------

    if verbose:
        print 'Writing file ', output, ':'
    fido = NetCDFFile(output, netcdf_mode_w)
    sww = Write_sww(static_quantities, dynamic_quantities)
    sww.store_header(fido, times,
                             len(out_tris),
                             len(points),
                             description=description,
                             sww_precision=netcdf_float32)
[7817]140
141
[8281]142
143    from anuga.coordinate_transforms.geo_reference import Geo_reference
144    geo_reference = Geo_reference()
[8277]145   
[8281]146    sww.store_triangulation(fido, points, out_tris, points_georeference=geo_reference)
[8277]147
148    fido.order      = order
149    fido.xllcorner  = xllcorner;
150    fido.yllcorner  = yllcorner ;
151    fido.zone       = zone;
152    fido.false_easting  = false_easting;
153    fido.false_northing = false_northing;
154    fido.datum      = datum;
155    fido.projection = projection;
[7819]156       
157    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)
[7817]158
[7822]159    # Write out all the dynamic quantities for each timestep
[7819]160    for q in dynamic_quantities:
[7820]161        q_values = out_d_quantities[q]
162        for i, time_slice in enumerate(q_values):
[8277]163            fido.variables[q][i] = num.array(time_slice, netcdf_float32)
[7820]164       
[7819]165        # This updates the _range values
166        q_range = fido.variables[q + Write_sww.RANGE][:]
167        q_values_min = num.min(q_values)
168        if q_values_min < q_range[0]:
169            fido.variables[q + Write_sww.RANGE][0] = q_values_min
170        q_values_max = num.max(q_values)
171        if q_values_max > q_range[1]:
172            fido.variables[q + Write_sww.RANGE][1] = q_values_max       
[7817]173
[7819]174                                       
175    fido.close()
[8284]176
177
def _sww_merge_parallel(swwfiles, output, verbose=False, delete_old=False):
    """
        Merge a list of sww files into a single file.

        Use to merge files created by parallel runs.

        The sww files to be merged must have exactly the same timesteps.

        Note that some advanced information and custom quantities may not be
        exported.

        swwfiles is a list of .sww files to merge.
        output is the output filename, including the .sww extension.
        verbose is True to log progress information.
        delete_old is True to remove the input files once the merged file
        has been written.
    """
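
    # How the merge works: each per-processor file carries local-to-global
    # maps (node_l2g, tri_l2g) and a tri_full_flag marking the triangles the
    # processor owns.  Global arrays are allocated once, sized from the
    # number_of_global_triangles and number_of_global_nodes attributes of the
    # first file, and every file's points, triangles and quantities are then
    # scattered into place through those maps.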

    if verbose:
        print "MERGING SWW Files"

    static_quantities = ['elevation']
    dynamic_quantities = ['stage', 'xmomentum', 'ymomentum']

    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print 'Reading file ', filename, ':'

        fid = NetCDFFile(filename, netcdf_mode_r)

        if first_file:

            times = fid.variables['time'][:]
            n_steps = len(times)
            number_of_timesteps = fid.dimensions['number_of_timesteps']
            starttime = int(fid.starttime)

            out_s_quantities = {}
            out_d_quantities = {}

            number_of_global_triangles = int(fid.number_of_global_triangles)
            number_of_global_nodes     = int(fid.number_of_global_nodes)

            order      = fid.order
            xllcorner  = fid.xllcorner
            yllcorner  = fid.yllcorner
            zone       = fid.zone
            false_easting  = fid.false_easting
            false_northing = fid.false_northing
            datum      = fid.datum
            projection = fid.projection

            g_volumes = num.zeros((number_of_global_triangles,3),num.int)
            g_x = num.zeros((number_of_global_nodes,),num.float32)
            g_y = num.zeros((number_of_global_nodes,),num.float32)

            g_points = num.zeros((number_of_global_nodes,2),num.float32)

            for quantity in static_quantities:
                out_s_quantities[quantity] = num.zeros((number_of_global_nodes,),num.float32)

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_quantities:
                out_d_quantities[quantity] = \
                      num.zeros((n_steps,number_of_global_nodes),num.float32)
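            # Each dynamic quantity is held as one (n_steps x
            # number_of_global_nodes) float32 array, i.e. roughly
            # 4*n_steps*number_of_global_nodes bytes per quantity, which is
            # usually the largest allocation the merge makes.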
[8284]247                 
248            description = 'merged:' + getattr(fid, 'description')         
249            first_file = False
250
251
252        # Read in from files and add to global arrays
253
254        tri_l2g  = fid.variables['tri_l2g'][:]
255        node_l2g = fid.variables['node_l2g'][:]
256        tri_full_flag = fid.variables['tri_full_flag'][:]
[8285]257        volumes = num.array(fid.variables['volumes'][:],dtype=num.int)
258        l_volumes = num.zeros_like(volumes)
[8284]259
[8285]260
261        # Change the local node ids to global id in the
262        # volume array
263 
264        for i in range(len(l_volumes)):
265            g_n0 = node_l2g[volumes[i,0]]
266            g_n1 = node_l2g[volumes[i,1]]
267            g_n2 = node_l2g[volumes[i,2]]
268       
269            l_volumes[i,:] = [g_n0,g_n1,g_n2]
270
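        # Note: node_l2g acts as a lookup table from local to global node
        # ids, so the loop above could equally be written as the single
        # numpy gather  l_volumes = num.asarray(node_l2g)[volumes].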
        # Just pick out the full triangles
        ftri_l2g = num.compress(tri_full_flag, tri_l2g)

        g_volumes[ftri_l2g] = num.compress(tri_full_flag,l_volumes,axis=0)

        #g_x[node_l2g] = fid.variables['x']
        #g_y[node_l2g] = fid.variables['y']

        g_points[node_l2g,0] = fid.variables['x']
        g_points[node_l2g,1] = fid.variables['y']

        # Read in static quantities
        for quantity in static_quantities:
            out_s_quantities[quantity][node_l2g] = \
                         num.array(fid.variables[quantity],dtype=num.float32)

        # Collate all dynamic quantities according to their timestep
        for quantity in dynamic_quantities:
            q = fid.variables[quantity]
            for i in range(n_steps):
                out_d_quantities[quantity][i][node_l2g] = \
                           num.array(q[i],dtype=num.float32)

        fid.close()


    #---------------------------
    # Write out the SWW file
    #---------------------------

    if verbose:
        print 'Writing file ', output, ':'
    fido = NetCDFFile(output, netcdf_mode_w)
    sww = Write_sww(static_quantities, dynamic_quantities)
    sww.store_header(fido, starttime,
                             number_of_global_triangles,
                             number_of_global_nodes,
                             description=description,
                             sww_precision=netcdf_float32)


    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()

    sww.store_triangulation(fido, g_points, g_volumes, points_georeference=geo_reference)

    fido.order      = order
    fido.xllcorner  = xllcorner
    fido.yllcorner  = yllcorner
    fido.zone       = zone
    fido.false_easting  = false_easting
    fido.false_northing = false_northing
    fido.datum      = datum
    fido.projection = projection

    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)

    # Write the time vector explicitly (store_header above was given the
    # scalar starttime rather than the full time array).
    for i in range(n_steps):
        fido.variables['time'][i] = times[i]

    # Write out all the dynamic quantities for each timestep
    for q in dynamic_quantities:
        q_values = out_d_quantities[q]
        for i in range(n_steps):
            fido.variables[q][i] = q_values[i]

        # This updates the _range values
        q_range = fido.variables[q + Write_sww.RANGE][:]
        q_values_min = num.min(q_values)
        if q_values_min < q_range[0]:
            fido.variables[q + Write_sww.RANGE][0] = q_values_min
        q_values_max = num.max(q_values)
        if q_values_max > q_range[1]:
            fido.variables[q + Write_sww.RANGE][1] = q_values_max

    fido.close()

    if delete_old:
        import os
        for filename in swwfiles:

            if verbose:
                print 'Deleting file ', filename, ':'
            os.remove(filename)

if __name__ == "__main__":

    import argparse
    from anuga.anuga_exceptions import ANUGAError

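    # Typical invocation, assuming the partial files came from a run on
    # 4 processors with the default global name "domain" (i.e.
    # domain_P4_0.sww ... domain_P4_3.sww):
    #
    #     python sww_merge.py -f domain -np 4 -v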
    parser = argparse.ArgumentParser(description='Merge sww files created from a parallel run')
    parser.add_argument('-np', type=int, default=4,
                   help='number of processors used to produce sww files')
    parser.add_argument('-f', type=str, default="domain",
                   help='domain global name')
    parser.add_argument('-v', nargs='?', type=bool, const=True, default=False,
                   help='verbosity')
    parser.add_argument('-delete_old', nargs='?', type=bool, const=True, default=False,
                   help='flag to delete the input files')
    args = parser.parse_args()

    np = args.np
    domain_global_name = args.f
    verbose = args.v
    delete_old = args.delete_old

    try:
        sww_merge_parallel(domain_global_name, np, verbose, delete_old)
    except:
        msg = 'ERROR: When merging sww files %s' % domain_global_name
        print msg
        raise