import numpy as num
import os

import anuga.utilities.log as log
from anuga.coordinate_transforms.geo_reference import Geo_reference


def sww2pts(name_in, name_out=None,
            data_points=None,
            quantity=None,
            timestep=None,
            reduction=None,
            NODATA_value=-9999,
            verbose=False,
            origin=None):
    """Read an SWW file and interpolate the named quantity at selected points.

    The parameter 'quantity' must be the name of an existing quantity or
    an expression involving existing quantities. The default is 'elevation'.

    If timestep (an index) is given, the quantity is output at that timestep.

    If reduction is given, it is used to reduce the quantity over all
    timesteps (the default is max).

    data_points (Nx2 array) gives the locations of the points at which the
    quantity is to be computed.
    """
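    # For example, quantity='stage - elevation' evaluates water depth, and
    # reduction=min picks the minimum over all timesteps instead of the
    # default maximum.  Names used in an expression must exist as variables
    # in the .sww file (e.g. 'stage', 'xmomentum', 'elevation').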

    import sys
    from anuga.geometry.polygon import inside_polygon, outside_polygon
    from anuga.abstract_2d_finite_volumes.util import \
         apply_expression_to_dictionary
    from anuga.geospatial_data.geospatial_data import Geospatial_data

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    basename_in, in_ext = os.path.splitext(name_in)

    if name_out is not None:
        basename_out, out_ext = os.path.splitext(name_out)
    else:
        basename_out = basename_in + '_%s' % quantity
        out_ext = '.pts'
        name_out = basename_out + out_ext

    if in_ext != '.sww':
        raise IOError('Input format for %s must be .sww' % name_in)

    if out_ext != '.pts':
        raise IOError('Output format for %s must be .pts' % name_out)

    # Read sww file
    if verbose: log.critical('Reading from %s' % name_in)
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(name_in)

    # Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']

    if origin is None:
        # Get geo_reference
        # sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError:
            geo_reference = Geo_reference()    # Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]

    # FIXME: Refactor using code from file_function.statistics
    # Something like print swwstats(swwname)
    if verbose:
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        times = fid.variables['time'][:]
        log.critical('------------------------------------------------')
        log.critical('Statistics of SWW file:')
        log.critical('  Name: %s' % name_in)
        log.critical('  Reference:')
        log.critical('    Lower left corner: [%f, %f]' % (xllcorner, yllcorner))
        log.critical('    Start time: %f' % fid.starttime[0])
        log.critical('  Extent:')
        log.critical('    x [m] in [%f, %f], len(x) == %d'
                     % (num.min(x), num.max(x), len(x.flat)))
        log.critical('    y [m] in [%f, %f], len(y) == %d'
                     % (num.min(y), num.max(y), len(y.flat)))
        log.critical('    t [s] in [%f, %f], len(t) == %d'
                     % (min(times), max(times), len(times)))
        log.critical('  Quantities [SI units]:')
        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
            q = fid.variables[name][:].flat
            log.critical('    %s in [%f, %f]' % (name, min(q), max(q)))

    # Get quantity and reduce if applicable
    if verbose: log.critical('Processing quantity %s' % quantity)

    # Turn NetCDF objects into numeric arrays
    quantity_dict = {}
    for name in fid.variables.keys():
        quantity_dict[name] = fid.variables[name][:]

    # Convert quantity expression to quantities found in sww file
    q = apply_expression_to_dictionary(quantity, quantity_dict)

    if len(q.shape) == 2:
        # q has a time component and needs to be reduced along
        # the temporal dimension
        if verbose: log.critical('Reducing quantity %s' % quantity)

        q_reduced = num.zeros(number_of_points, num.float)
        for k in range(number_of_points):
            q_reduced[k] = reduction(q[:, k])
        q = q_reduced

    # Post condition: Now q has dimension: number_of_points
    assert len(q.shape) == 1
    assert q.shape[0] == number_of_points

    if verbose:
        log.critical('Processed values for %s are in [%f, %f]'
                     % (quantity, min(q), max(q)))

    # Build Nx2 array of vertex points from the mesh coordinates
    vertex_points = num.concatenate((x[:, num.newaxis], y[:, num.newaxis]), axis=1)
    assert len(vertex_points.shape) == 2

    # Interpolate
    from anuga.fit_interpolate.interpolate import Interpolate
    interp = Interpolate(vertex_points, volumes, verbose=verbose)

    # Interpolate using quantity values
    if verbose: log.critical('Interpolating')
    interpolated_values = interp.interpolate(q, data_points).flatten()

    if verbose:
        log.critical('Interpolated values are in [%f, %f]'
                     % (num.min(interpolated_values),
                        num.max(interpolated_values)))

    # Assign NODATA_value to all points outside bounding polygon
    # (from interpolation mesh)
    P = interp.mesh.get_boundary_polygon()
    outside_indices = outside_polygon(data_points, P, closed=True)

    for i in outside_indices:
        interpolated_values[i] = NODATA_value

    # Store results
    G = Geospatial_data(data_points=data_points, attributes=interpolated_values)
    G.export_points_file(name_out, absolute=True)

    fid.close()
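

if __name__ == '__main__':
    # Minimal usage sketch.  The file names and point coordinates below are
    # hypothetical placeholders: substitute a real .sww file and points that
    # lie inside its mesh (points outside the mesh boundary are assigned
    # NODATA_value).
    example_points = num.array([[1.0, 1.0],
                                [5.0, 2.5],
                                [10.0, 5.0]])
    sww2pts('runup.sww',
            name_out='runup_depth.pts',
            data_points=example_points,
            quantity='stage - elevation',   # expression: water depth
            reduction=max,                  # maximum over all timesteps
            verbose=True)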