source: trunk/anuga_core/source/anuga/file_conversion/sww2pts.py @ 9516

Last change on this file since 9516 was 8780, checked in by steve, 12 years ago

Some changes to allow netcdf4 use

File size: 6.0 KB
import numpy as num
import os

from anuga.coordinate_transforms.geo_reference import Geo_reference
import anuga.utilities.log as log    # used for the log.critical() calls below


def sww2pts(name_in, name_out=None,
            data_points=None,
            quantity=None,
            timestep=None,
            reduction=None,
            NODATA_value=-9999,
            verbose=False,
            origin=None):
    """Read an SWW file and interpolate a quantity to selected points.

    The parameter 'quantity' must be the name of an existing quantity or
    an expression involving existing quantities. The default is 'elevation'.

    If 'timestep' (an index) is given, the quantity at that timestep is used.

    If 'reduction' is given, it is used to reduce the quantity over all
    timesteps (the default is max).

    'data_points' (an Nx2 array) gives the locations of the points at which
    the quantity is to be computed.

    See the usage sketch at the end of this module.
    """

    import sys
    from anuga.geometry.polygon import inside_polygon, outside_polygon
    from anuga.abstract_2d_finite_volumes.util import \
        apply_expression_to_dictionary
    from anuga.geospatial_data.geospatial_data import Geospatial_data

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    basename_in, in_ext = os.path.splitext(name_in)

    if name_out is not None:
        basename_out, out_ext = os.path.splitext(name_out)
    else:
        basename_out = basename_in + '_%s' % quantity
        out_ext = '.pts'
        name_out = basename_out + out_ext
    if in_ext != '.sww':
        raise IOError('Input format for %s must be .sww' % name_in)

    if out_ext != '.pts':
        raise IOError('Output format for %s must be .pts' % name_out)

    # Read sww file
    if verbose: log.critical('Reading from %s' % name_in)
    from anuga.file.netcdf import NetCDFFile
    fid = NetCDFFile(name_in)

    # Get vertex coordinates and triangle connectivity
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    try:  # works with netCDF4 (dimension objects have a length)
        number_of_timesteps = len(fid.dimensions['number_of_timesteps'])
        number_of_points = len(fid.dimensions['number_of_points'])
    except TypeError:  # works with Scientific.IO.NetCDF (dimensions are ints)
        number_of_timesteps = fid.dimensions['number_of_timesteps']
        number_of_points = fid.dimensions['number_of_points']

    if origin is None:
        # Get geo_reference (sww files don't have to have a geo_ref)
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError:
            geo_reference = Geo_reference()    # Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]

    # FIXME: Refactor using code from file_function.statistics
    # Something like print swwstats(swwname)
    if verbose:
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        times = fid.variables['time'][:]
        log.critical('------------------------------------------------')
        log.critical('Statistics of SWW file:')
        log.critical('  Name: %s' % name_in)
        log.critical('  Reference:')
        log.critical('    Lower left corner: [%f, %f]' % (xllcorner, yllcorner))
        log.critical('    Start time: %f' % fid.starttime[0])
        log.critical('  Extent:')
        log.critical('    x [m] in [%f, %f], len(x) == %d'
                     % (num.min(x), num.max(x), len(x.flat)))
        log.critical('    y [m] in [%f, %f], len(y) == %d'
                     % (num.min(y), num.max(y), len(y.flat)))
        log.critical('    t [s] in [%f, %f], len(t) == %d'
                     % (min(times), max(times), len(times)))
        log.critical('  Quantities [SI units]:')
        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
            q = fid.variables[name][:].flat
            log.critical('    %s in [%f, %f]' % (name, min(q), max(q)))

    # Get quantity and reduce if applicable
    if verbose: log.critical('Processing quantity %s' % quantity)

    # Turn NetCDF objects into numeric arrays
    quantity_dict = {}
    for name in fid.variables.keys():
        quantity_dict[name] = fid.variables[name][:]

    # Convert quantity expression to quantities found in sww file
    q = apply_expression_to_dictionary(quantity, quantity_dict)
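    # For example, if 'quantity' were the expression 'stage - elevation', the
    # call above would evaluate it against the arrays read from the sww file,
    # giving the water depth at each mesh vertex.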

    if len(q.shape) == 2:
        # q has a time component
        if timestep is not None:
            # Extract the quantity at the requested timestep
            if verbose: log.critical('Extracting timestep %d' % timestep)
            q = q[timestep, :]
        else:
            # Reduce along the temporal dimension
            if verbose: log.critical('Reducing quantity %s' % quantity)

            q_reduced = num.zeros(number_of_points, float)
            for k in range(number_of_points):
                q_reduced[k] = reduction(q[:, k])
            q = q_reduced

    # Post condition: q now holds one value per point
    assert len(q.shape) == 1
    assert q.shape[0] == number_of_points

    if verbose:
        log.critical('Processed values for %s are in [%f, %f]'
                     % (quantity, min(q), max(q)))

    # Build an Nx2 array of vertex coordinates
    vertex_points = num.concatenate((x[:, num.newaxis], y[:, num.newaxis]), axis=1)
    assert len(vertex_points.shape) == 2

    # Build interpolation object from the triangular mesh
    from anuga.fit_interpolate.interpolate import Interpolate
    interp = Interpolate(vertex_points, volumes, verbose=verbose)

    # Interpolate quantity values to the requested data points
    if verbose: log.critical('Interpolating')
    interpolated_values = interp.interpolate(q, data_points).flatten()

    if verbose:
        log.critical('Interpolated values are in [%f, %f]'
                     % (num.min(interpolated_values),
                        num.max(interpolated_values)))

    # Assign NODATA_value to all points outside the bounding polygon
    # of the interpolation mesh
    P = interp.mesh.get_boundary_polygon()
    outside_indices = outside_polygon(data_points, P, closed=True)

    for i in outside_indices:
        interpolated_values[i] = NODATA_value

    # Store results
    G = Geospatial_data(data_points=data_points, attributes=interpolated_values)

    G.export_points_file(name_out, absolute=True)

    fid.close()
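
# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the original module).  The
# file names and point coordinates below are hypothetical, and 'stage' is
# assumed to be one of the quantities stored in the .sww file.
if __name__ == '__main__':
    example_points = num.array([[100.0, 200.0],
                                [150.0, 250.0]])    # Nx2 easting/northing pairs

    sww2pts('example.sww',                  # hypothetical input file
            name_out='example_stage.pts',   # hypothetical output file
            data_points=example_points,
            quantity='stage',               # or an expression, e.g. 'stage - elevation'
            reduction=max,                  # any callable over the time axis, e.g. num.mean
            verbose=True)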