source: trunk/anuga_core/source/anuga/utilities/sww_merge.py @ 8273

Last change on this file since 8273 was 8273, checked in by steve, 12 years ago

Updated sww_merge again. Hopefully doesn't break too many codes

File size: 5.4 KB
Line 
1"""
2    Merge a list of .sww files together into a single file.
3"""
4
5import numpy as num
6from anuga.utilities.numerical_tools import ensure_numeric
7
8from Scientific.IO.NetCDF import NetCDFFile
9from anuga.config import netcdf_mode_r, netcdf_mode_w, \
10                            netcdf_mode_a, netcdf_float
11from anuga.file.sww import SWW_file, Write_sww
12
def sww_merge(domain_global_name, np, verbose=False):
    """Merge the np partial sww files produced by a parallel run.

    The partial files are assumed to be named
    <domain_global_name>_P<p>_<np>.sww for p = 0 .. np-1; the merged
    result is written to <domain_global_name>.sww.

    domain_global_name  base name shared by all partial sww files
    np                  number of processors (and hence partial files)
    verbose             True to log progress information
    """

    merged_name = domain_global_name + ".sww"
    partial_names = [domain_global_name + "_P" + str(p) + "_" + str(np) + ".sww"
                     for p in range(np)]

    _sww_merge(partial_names, merged_name, verbose)
20
def _sww_merge(swwfiles, output, verbose):
    """
        Merge a list of sww files into a single file.

        May be useful for parallel runs. Note that colinear points and
        edges are not merged: there will essentially be multiple meshes within
        the one sww file.

        The sww files to be merged must have exactly the same timesteps. Note
        that some advanced information and custom quantities may not be
        exported.

        swwfiles is a list of .sww files to merge.
        output is the output filename, including .sww extension.
        verbose True to log output information

        Raises ValueError if swwfiles is empty (previously this crashed
        later with an opaque NameError).
    """

    if not swwfiles:
        raise ValueError('_sww_merge: no input sww files supplied')

    if verbose:
        print('MERGING SWW Files')

    static_quantities = ['elevation']
    dynamic_quantities = ['stage', 'xmomentum', 'ymomentum']

    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print('Reading file %s:' % filename)

        fid = NetCDFFile(filename, netcdf_mode_r)
        try:
            tris = fid.variables['volumes'][:]

            if first_file:
                # First file seeds the accumulators; all files are assumed
                # to share the same timesteps, so 'time' is read only once.
                times = fid.variables['time'][:]
                x = []
                y = []
                out_tris = list(tris)
                out_s_quantities = {}
                out_d_quantities = {}

                for quantity in static_quantities:
                    out_s_quantities[quantity] = []

                # Quantities are stored as a 2D array of timesteps x data.
                for quantity in dynamic_quantities:
                    out_d_quantities[quantity] = [[] for _ in range(len(times))]

                description = 'merged:' + getattr(fid, 'description')
                first_file = False
            else:
                for tri in tris:
                    # Advance new tri indices to point at newly appended points.
                    verts = [vertex + tri_offset for vertex in tri]
                    out_tris.append(verts)

            num_pts = fid.dimensions['number_of_points']
            tri_offset += num_pts

            if verbose:
                print('  new triangle index offset is %s' % tri_offset)

            x.extend(list(fid.variables['x'][:]))
            y.extend(list(fid.variables['y'][:]))

            # Grow the list of static quantities associated with the x,y points
            for quantity in static_quantities:
                out_s_quantities[quantity].extend(fid.variables[quantity][:])

            # Collate all dynamic quantities according to their timestep
            for quantity in dynamic_quantities:
                time_chunks = fid.variables[quantity][:]
                for i, time_chunk in enumerate(time_chunks):
                    out_d_quantities[quantity][i].extend(time_chunk)
        finally:
            # Close each input file as soon as we are done with it.
            # (Previously only the last file was closed, after the loop,
            # leaking every intermediate handle.)
            fid.close()

    # Mash all points into a single big list
    points = [[xx, yy] for xx, yy in zip(x, y)]

    # Write out the merged SWW file

    if verbose:
        print('Writing file %s:' % output)

    fido = NetCDFFile(output, netcdf_mode_w)
    sww = Write_sww(static_quantities, dynamic_quantities)
    sww.store_header(fido, times,
                     len(out_tris),
                     len(points),
                     description=description,
                     sww_precision=netcdf_float)

    sww.store_triangulation(fido, points, out_tris)

    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)

    # Write out all the dynamic quantities for each timestep
    for q in dynamic_quantities:
        q_values = out_d_quantities[q]
        for i, time_slice in enumerate(q_values):
            fido.variables[q][i] = num.array(time_slice, netcdf_float)

        # This updates the _range values so they span the merged data
        q_range = fido.variables[q + Write_sww.RANGE][:]
        q_values_min = num.min(q_values)
        if q_values_min < q_range[0]:
            fido.variables[q + Write_sww.RANGE][0] = q_values_min
        q_values_max = num.max(q_values)
        if q_values_max > q_range[1]:
            fido.variables[q + Write_sww.RANGE][1] = q_values_max

    fido.close()
134
if __name__ == "__main__":

    # Command-line driver: merge the partial sww files from an np-way
    # parallel run into a single <name>.sww file.
    import argparse

    parser = argparse.ArgumentParser(description='Merge sww files created from parallel run')
    parser.add_argument('-np', type=int, default=4,
                        help='number of processors used to produce sww files')
    parser.add_argument('-f', type=str, default="domain",
                        help='domain global name')
    parser.add_argument('-v', nargs='?', type=bool, const=True, default=False,
                        help='verbosity')

    args = parser.parse_args()

    np = args.np
    domain_global_name = args.f
    verbose = args.v

    try:
        sww_merge(domain_global_name, np, verbose)
    except:
        # Log which merge failed, then re-raise so the traceback and
        # exit status are preserved.
        print('ERROR: When merging sww files %s' % domain_global_name)
        raise
Note: See TracBrowser for help on using the repository browser.