source: trunk/anuga_core/source/anuga/file_conversion/urs2sts.py @ 7853

Last change on this file since 7853 was 7778, checked in by hudson, 14 years ago

Cleaned up unit tests to use API.

File size: 11.8 KB
import numpy as num
import anuga.utilities.log as log

# ANUGA modules
from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a, \
                         netcdf_float
from anuga.utilities.numerical_tools import ensure_numeric
from anuga.coordinate_transforms.geo_reference import Geo_reference, \
     write_NetCDF_georeference, ensure_geo_reference

# local modules
from anuga.file.mux import read_mux2_py
from anuga.file.mux import WAVEHEIGHT_MUX2_LABEL, EAST_VELOCITY_MUX2_LABEL, \
                           NORTH_VELOCITY_MUX2_LABEL
from anuga.file.sts import Write_sts

##
# @brief Convert a URS (mux2, wave propagation) file to an STS file.
# @param basename_in String (or list) of source file stems.
# @param basename_out Stem of output STS file path.
# @param weights Weight applied to each source (default is equal weighting).
# @param verbose True if this function is to be verbose.
# @param origin Tuple with geo-referenced UTM coords (zone, easting, northing).
# @param zone UTM zone to use; if None it is inferred from the first point.
# @param central_meridian Optional central meridian (degrees) passed to the
#                         UTM conversion (redfearn).
# @param mean_stage Constant value added to the converted stage.
# @param zscale Scale factor applied to the mux2 wave heights.
# @param ordering_filename Path of a file specifying which mux2 gauge points are
#                          to be stored.
# @note Also convert latitude and longitude to UTM. All coordinates are
#       assumed to be given in the GDA94 datum.
def urs2sts(basename_in, basename_out=None,
            weights=None,
            verbose=False,
            origin=None,
            zone=None,
            central_meridian=None,
            mean_stage=0.0,
            zscale=1.0,
            ordering_filename=None):
    """Convert URS mux2 format for wave propagation to STS format.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    origin is a 3-tuple with geo-referenced
    UTM coordinates (zone, easting, northing).

    Inputs:

    basename_in: list of source file prefixes.

        These are combined with the extensions:
        WAVEHEIGHT_MUX2_LABEL = '-z-mux2' for stage
        EAST_VELOCITY_MUX2_LABEL = '-e-mux2' for xmomentum
        NORTH_VELOCITY_MUX2_LABEL = '-n-mux2' for ymomentum

        to create a 2D list of mux2 files. The rows are associated with each
        quantity and must have the above extensions;
        the columns are the list of file prefixes.

    ordering_filename: name of a .txt file specifying which mux2 gauge points
              are to be stored. This is indicated by the index of the gauge
              in the ordering file.

              Ordering file format:
              1st line:    'index,longitude,latitude\n'
              other lines: index,longitude,latitude

              If ordering_filename is None or the ordering file is empty then
              all points are taken in the order they
              appear in the mux2 file.

    Output:
      basename_out: name of the sts file in which the mux2 data is stored.

    NOTE: South is positive in mux files so the sign of the y-component of
          velocity is reversed.
    """
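    # Illustrative sketch of an ordering file (values are hypothetical and
    # only show the format described above):
    #
    #     index, longitude, latitude
    #     0, 151.1234, -34.5678
    #     3, 151.2345, -34.6789
    #     1, 151.3456, -34.7890
    #
    # Only the gauges listed (by mux2 index) are written to the STS file,
    # in the order in which they appear here.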

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from types import ListType, StringType
    from operator import __and__

    if not isinstance(basename_in, ListType):
        if verbose: log.critical('Reading single source')
        basename_in = [basename_in]

    # This is the value used in the mux file format to indicate NAN data
    # FIXME (Ole): This should be changed everywhere to IEEE NAN when
    #              we upgrade to Numpy
    NODATA = 99

    # Check that basename_in is a list of strings
    if not reduce(__and__, map(lambda z: isinstance(z, StringType), basename_in)):
        msg = 'basename_in must be a string or list of strings'
        raise Exception, msg

    # Find the number of sources to be used
    numSrc = len(basename_in)

    # A weight must be specified for each source
    if weights is None:
        # Default is equal weighting
        weights = num.ones(numSrc, num.float) / numSrc
    else:
        weights = ensure_numeric(weights)
        msg = 'When combining multiple sources, a weight must be specified ' \
              'for each mux2 source file'
        assert len(weights) == numSrc, msg

    if verbose: log.critical('Weights used in urs2sts: %s' % str(weights))

    # Check output filename
    if basename_out is None:
        msg = 'STS filename must be specified as basename_out ' \
              'in function urs2sts'
        raise Exception, msg

    if basename_out.endswith('.sts'):
        stsname = basename_out
    else:
        stsname = basename_out + '.sts'

    # Create input filenames from basenames and check their existence
    files_in = [[], [], []]
    for files in basename_in:
        files_in[0].append(files + WAVEHEIGHT_MUX2_LABEL)
        files_in[1].append(files + EAST_VELOCITY_MUX2_LABEL)
        files_in[2].append(files + NORTH_VELOCITY_MUX2_LABEL)

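    # At this point files_in is a 2D list of mux2 filenames: files_in[0]
    # holds the '-z-mux2' (stage) files, files_in[1] the '-e-mux2' (east
    # velocity) files and files_in[2] the '-n-mux2' (north velocity) files,
    # with one entry per source prefix in basename_in.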
    quantities = ['HA', 'UA', 'VA']    # Quantity names used in the MUX2 format
    for i in range(len(quantities)):
        for file_in in files_in[i]:
            if not os.access(file_in, os.R_OK):
                msg = 'File %s does not exist or is not accessible' % file_in
                raise IOError, msg

    # Establish permutation array
    if ordering_filename is not None:
        if verbose: log.critical('Reading ordering file %s'
                                 % ordering_filename)

        # Read ordering file
        try:
            fid = open(ordering_filename, 'r')
            file_header = fid.readline().split(',')
            ordering_lines = fid.readlines()
            fid.close()
        except:
            msg = 'Cannot open %s' % ordering_filename
            raise Exception, msg

        reference_header = 'index, longitude, latitude\n'
        reference_header_split = reference_header.split(',')
        for i in range(3):
            if not file_header[i].strip() == reference_header_split[i].strip():
                msg = 'File must contain header: ' + reference_header
                raise Exception, msg

        if len(ordering_lines) < 2:
            msg = 'File must contain at least two points'
            raise Exception, msg

        permutation = [int(line.split(',')[0]) for line in ordering_lines]
        permutation = ensure_numeric(permutation)
    else:
        permutation = None

    # Read MUX2 files
    if verbose: log.critical('Reading mux2 files')

    mux = {}
    for i, quantity in enumerate(quantities):
        # For each quantity read the associated list of source mux2 files
        # with the extension associated with that quantity

        times, latitudes, longitudes, elevation, mux[quantity], starttime \
            = read_mux2_py(files_in[i], weights, permutation, verbose=verbose)

        # Check that all quantities have consistent time and space information
        if quantity != quantities[0]:
            msg = '%s, %s and %s have inconsistent gauge data' \
                  % (files_in[0], files_in[1], files_in[2])
            assert num.allclose(times, times_old), msg
            assert num.allclose(latitudes, latitudes_old), msg
            assert num.allclose(longitudes, longitudes_old), msg
            assert num.allclose(elevation, elevation_old), msg
            assert num.allclose(starttime, starttime_old), msg
        times_old = times
        latitudes_old = latitudes
        longitudes_old = longitudes
        elevation_old = elevation
        starttime_old = starttime

        # Self check - can be removed to improve speed
        #ref_longitudes = [float(line.split(',')[1]) for line in ordering_lines]
        #ref_latitudes = [float(line.split(',')[2]) for line in ordering_lines]
        #
        #msg = 'Longitudes specified in ordering file do not match those ' \
        #      'found in mux files. ' \
        #      'I got %s instead of %s (only beginning shown)' \
        #      % (str(longitudes[:10]) + '...',
        #         str(ref_longitudes[:10]) + '...')
        #assert allclose(longitudes, ref_longitudes), msg
        #
        #msg = 'Latitudes specified in ordering file do not match those ' \
        #      'found in mux files. ' \
        #      'I got %s instead of %s (only beginning shown)' \
        #      % (str(latitudes[:10]) + '...',
        #         str(ref_latitudes[:10]) + '...')
        #assert allclose(latitudes, ref_latitudes), msg

    # Store timeseries in STS file
    msg = 'File is empty and/or clipped region not in file region'
    assert len(latitudes) > 0, msg

    number_of_points = latitudes.shape[0]      # Number of stations retrieved
    number_of_times = times.shape[0]           # Number of timesteps
    number_of_latitudes = latitudes.shape[0]   # Number of latitudes
    number_of_longitudes = longitudes.shape[0] # Number of longitudes

    # The permutation vector contains the original indices as given in the
    # ordering file, or None, in which case the points are assigned the
    # trivial indices enumerating them from 0 to number_of_points-1
    if permutation is None:
        permutation = num.arange(number_of_points, dtype=num.int)

    # NetCDF file definition
    outfile = NetCDFFile(stsname, netcdf_mode_w)

    description = 'Converted from URS mux2 files: %s' % basename_in

    # Create new file
    sts = Write_sts()
    sts.store_header(outfile,
                     times + starttime,
                     number_of_points,
                     description=description,
                     verbose=verbose,
                     sts_precision=netcdf_float)

    # Store
    from anuga.coordinate_transforms.redfearn import redfearn

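    # redfearn() maps latitude/longitude (GDA94) to UTM, returning
    # (zone, easting, northing).  The first gauge (or the supplied zone
    # argument) fixes the reference zone; every other gauge must fall in
    # the same zone.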
    x = num.zeros(number_of_points, num.float)  # Easting
    y = num.zeros(number_of_points, num.float)  # Northing

    # Check zone boundaries
    if zone is None:
        refzone, _, _ = redfearn(latitudes[0], longitudes[0],
                                 central_meridian=central_meridian)
    else:
        refzone = zone

    old_zone = refzone

    for i in range(number_of_points):
        computed_zone, easting, northing = redfearn(latitudes[i], longitudes[i],
                                                    zone=zone,
                                                    central_meridian=central_meridian)
        x[i] = easting
        y[i] = northing
        if computed_zone != refzone:
            msg = 'All sts gauges need to be in the same zone.\n'
            msg += 'Offending gauge: zone %d, %.4f, %.4f\n' \
                   % (computed_zone, easting, northing)
            msg += 'Previous gauge: zone %d, %.4f, %.4f' \
                   % (old_zone, old_easting, old_northing)
            raise Exception, msg
        old_zone = computed_zone
        old_easting = easting
        old_northing = northing

    if origin is None:
        origin = Geo_reference(refzone, min(x), min(y))
    geo_ref = write_NetCDF_georeference(origin, outfile)

    elevation = num.resize(elevation, outfile.variables['elevation'][:].shape)
    outfile.variables['permutation'][:] = permutation.astype(num.int32) # Opteron 64
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['elevation'][:] = elevation

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    if verbose: log.critical('Converting quantities')

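    # For each gauge and timestep convert the mux2 quantities to ANUGA ones:
    # stage w = zscale*HA + mean_stage, depth h = w - elevation, and
    # momentum = depth-averaged velocity * depth.  NODATA values are zeroed.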
    for j in range(len(times)):
        for i in range(number_of_points):
            ha = mux['HA'][i,j]
            ua = mux['UA'][i,j]
            va = mux['VA'][i,j]
            if ha == NODATA:
                if verbose:
                    msg = 'Setting nodata value %d to 0 at time = %f, ' \
                          'point = %d' % (ha, times[j], i)
                    log.critical(msg)
                ha = 0.0
                ua = 0.0
                va = 0.0

            w = zscale*ha + mean_stage
            h = w - elevation[i]
            stage[j,i] = w

            xmomentum[j,i] = ua * h
            ymomentum[j,i] = -va * h    # South is positive in mux files

    outfile.close()

    if verbose:
        log.critical('Wrote sts file ' + stsname)
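# ---------------------------------------------------------------------------
# Usage sketch (illustrative only; the basenames, weights and ordering file
# below are hypothetical).  urs2sts expects the matching '-z-mux2', '-e-mux2'
# and '-n-mux2' files to exist for each basename:
#
#     from anuga.file_conversion.urs2sts import urs2sts
#
#     urs2sts(['source_a', 'source_b'],           # two mux2 source prefixes
#             basename_out='boundary',            # writes boundary.sts
#             weights=[0.7, 0.3],                 # relative source weights
#             ordering_filename='urs_order.txt',  # gauge subset and order
#             mean_stage=0.0,
#             zscale=1.0,
#             verbose=True)
# ---------------------------------------------------------------------------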