[7761] | 1 | from mux import WAVEHEIGHT_MUX2_LABEL, EAST_VELOCITY_MUX2_LABEL, \ |
---|
| 2 | NORTH_VELOCITY_MUX2_LABEL |
---|
| 3 | |
---|
##
# @brief Convert a URS (mux2, wave propagation) file to an STS file.
# @param basename_in String (or list) of source file stems.
# @param basename_out Stem of output STS file path.
# @param weights Weight per source file; defaults to equal weighting.
# @param verbose True if this function is to be verbose.
# @param origin Tuple with geo-ref UTM coords (zone, easting, northing).
# @param zone UTM zone to force for all gauges; derived from the first
#             gauge if None.
# @param central_meridian Optional central meridian passed to redfearn.
# @param mean_stage Constant offset added to the scaled wave height.
# @param zscale Scale factor applied to the wave height.
# @param ordering_filename Path of a file specifying which mux2 gauge points
#                          are to be stored.
# @note Also convert latitude and longitude to UTM. All coordinates are
#       assumed to be given in the GDA94 datum.
def urs2sts(basename_in, basename_out=None,
            weights=None,
            verbose=False,
            origin=None,
            zone=None,
            central_meridian=None,
            mean_stage=0.0,
            zscale=1.0,
            ordering_filename=None):
    """Convert URS mux2 format for wave propagation to sts format

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing)

    inputs:

    basename_in: list of source file prefixes

        These are combined with the extensions:
        WAVEHEIGHT_MUX2_LABEL = '-z-mux2' for stage
        EAST_VELOCITY_MUX2_LABEL = '-e-mux2' xmomentum
        NORTH_VELOCITY_MUX2_LABEL = '-n-mux2' and ymomentum

        to create a 2D list of mux2 file. The rows are associated with each
        quantity and must have the above extensions
        the columns are the list of file prefixes.

    ordering: a .txt file name specifying which mux2 gauge points are
              to be stored. This is indicated by the index of the gauge
              in the ordering file.

              ordering file format:
              1st line:    'index,longitude,latitude\n'
              other lines: index,longitude,latitude

              If ordering is None or ordering file is empty then
              all points are taken in the order they
              appear in the mux2 file.

    output:
      basename_out: name of sts file in which mux2 data is stored.

    NOTE: South is positive in mux files so sign of y-component of velocity
          is reverted
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from types import ListType, StringType

    # Accept a single source stem as well as a list of stems.
    if not isinstance(basename_in, ListType):
        if verbose: log.critical('Reading single source')
        basename_in = [basename_in]

    # This is the value used in the mux file format to indicate NAN data
    # FIXME (Ole): This should be changed everywhere to IEEE NAN when
    #              we upgrade to Numpy
    NODATA = 99

    # Check that basename is a list of strings
    if not all(isinstance(z, StringType) for z in basename_in):
        msg = 'basename_in must be a string or list of strings'
        raise Exception(msg)

    # Find the number of sources to be used
    numSrc = len(basename_in)

    # A weight must be specified for each source
    if weights is None:
        # Default is equal weighting
        weights = num.ones(numSrc, num.float) / numSrc
    else:
        weights = ensure_numeric(weights)
        msg = 'When combining multiple sources must specify a weight for ' \
              'mux2 source file'
        assert len(weights) == numSrc, msg

    if verbose: log.critical('Weights used in urs2sts: %s' % str(weights))

    # Check output filename
    if basename_out is None:
        msg = 'STS filename must be specified as basename_out ' \
              'in function urs2sts'
        raise Exception(msg)

    if basename_out.endswith('.sts'):
        stsname = basename_out
    else:
        stsname = basename_out + '.sts'

    # Create input filenames from basenames and check their existence
    # (row per quantity: stage, xmomentum, ymomentum; column per source).
    files_in = [[], [], []]
    for files in basename_in:
        files_in[0].append(files + WAVEHEIGHT_MUX2_LABEL)
        files_in[1].append(files + EAST_VELOCITY_MUX2_LABEL)
        files_in[2].append(files + NORTH_VELOCITY_MUX2_LABEL)

    quantities = ['HA', 'UA', 'VA']  # Quantity names used in the MUX2 format
    for i in range(len(quantities)):
        for file_in in files_in[i]:
            if not os.access(file_in, os.R_OK):
                msg = 'File %s does not exist or is not accessible' % file_in
                raise IOError(msg)

    # Establish permutation array mapping stored points to mux2 gauge indices
    if ordering_filename is not None:
        if verbose is True: log.critical('Reading ordering file %s'
                                         % ordering_filename)

        # Read ordering file
        try:
            fid = open(ordering_filename, 'r')
            file_header = fid.readline().split(',')
            ordering_lines = fid.readlines()
            fid.close()
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # are not swallowed.
            msg = 'Cannot open %s' % ordering_filename
            raise Exception(msg)

        reference_header = 'index, longitude, latitude\n'
        reference_header_split = reference_header.split(',')
        for i in range(3):
            if not file_header[i].strip() == reference_header_split[i].strip():
                msg = 'File must contain header: ' + reference_header
                raise Exception(msg)

        if len(ordering_lines) < 2:
            msg = 'File must contain at least two points'
            raise Exception(msg)

        permutation = [int(line.split(',')[0]) for line in ordering_lines]
        permutation = ensure_numeric(permutation)
    else:
        permutation = None

    # Read MUX2 files
    if verbose: log.critical('reading mux2 file')

    mux = {}
    for i, quantity in enumerate(quantities):
        # For each quantity read the associated list of source mux2 file with
        # extention associated with that quantity

        times, latitudes, longitudes, elevation, mux[quantity], starttime \
            = read_mux2_py(files_in[i], weights, permutation, verbose=verbose)

        # Check that all quantities have consistent time and space information
        if quantity != quantities[0]:
            msg = '%s, %s and %s have inconsistent gauge data' \
                  % (files_in[0], files_in[1], files_in[2])
            assert num.allclose(times, times_old), msg
            assert num.allclose(latitudes, latitudes_old), msg
            assert num.allclose(longitudes, longitudes_old), msg
            assert num.allclose(elevation, elevation_old), msg
            assert num.allclose(starttime, starttime_old), msg
        times_old = times
        latitudes_old = latitudes
        longitudes_old = longitudes
        elevation_old = elevation
        starttime_old = starttime

        # Self check - can be removed to improve speed
        #ref_longitudes = [float(line.split(',')[1]) for line in ordering_lines]
        #ref_latitudes = [float(line.split(',')[2]) for line in ordering_lines]
        #
        #msg = 'Longitudes specified in ordering file do not match those ' \
        #      'found in mux files. ' \
        #      'I got %s instead of %s (only beginning shown)' \
        #      % (str(longitudes[:10]) + '...',
        #         str(ref_longitudes[:10]) + '...')
        #assert allclose(longitudes, ref_longitudes), msg
        #
        #msg = 'Latitudes specified in ordering file do not match those ' \
        #      'found in mux files. '
        #      'I got %s instead of %s (only beginning shown)' \
        #      % (str(latitudes[:10]) + '...',
        #         str(ref_latitudes[:10]) + '...')
        #assert allclose(latitudes, ref_latitudes), msg

    # Store timeseries in STS file
    msg = 'File is empty and or clipped region not in file region'
    # Was 'assert len(latitudes > 0)' which only worked by accident
    # (len of a boolean array); this states the intended check.
    assert len(latitudes) > 0, msg

    number_of_points = latitudes.shape[0]      # Number of stations retrieved
    number_of_times = times.shape[0]           # Number of timesteps

    # The permutation vector of contains original indices
    # as given in ordering file or None in which case points
    # are assigned the trivial indices enumerating them from
    # 0 to number_of_points-1
    if permutation is None:
        permutation = num.arange(number_of_points, dtype=num.int)

    # NetCDF file definition
    outfile = NetCDFFile(stsname, netcdf_mode_w)

    description = 'Converted from URS mux2 files: %s' % basename_in

    # Create new file
    sts = Write_sts()
    sts.store_header(outfile,
                     times+starttime,
                     number_of_points,
                     description=description,
                     verbose=verbose,
                     sts_precision=netcdf_float)

    # Store
    from anuga.coordinate_transforms.redfearn import redfearn

    x = num.zeros(number_of_points, num.float)  # Easting
    y = num.zeros(number_of_points, num.float)  # Northing

    # Check zone boundaries
    if zone is None:
        refzone, _, _ = redfearn(latitudes[0], longitudes[0],
                                 central_meridian=central_meridian)
    else:
        refzone = zone

    old_zone = refzone
    # Initialised so the error message below cannot raise NameError if the
    # very first gauge disagrees with an explicitly supplied zone.
    old_easting = float('nan')
    old_northing = float('nan')

    for i in range(number_of_points):
        computed_zone, easting, northing = redfearn(latitudes[i], longitudes[i],
                                                    zone=zone,
                                                    central_meridian=central_meridian)
        x[i] = easting
        y[i] = northing
        if computed_zone != refzone:
            msg = 'All sts gauges need to be in the same zone. \n'
            msg += 'offending gauge:Zone %d,%.4f, %4f\n' \
                   % (computed_zone, easting, northing)
            msg += 'previous gauge:Zone %d,%.4f, %4f' \
                   % (old_zone, old_easting, old_northing)
            raise Exception(msg)
        old_zone = computed_zone
        old_easting = easting
        old_northing = northing

    if origin is None:
        origin = Geo_reference(refzone, min(x), min(y))
    geo_ref = write_NetCDF_georeference(origin, outfile)

    elevation = num.resize(elevation, outfile.variables['elevation'][:].shape)
    outfile.variables['permutation'][:] = permutation.astype(num.int32) # Opteron 64
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['elevation'][:] = elevation

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    if verbose: log.critical('Converting quantities')

    for j in range(len(times)):
        for i in range(number_of_points):
            ha = mux['HA'][i,j]
            ua = mux['UA'][i,j]
            va = mux['VA'][i,j]
            if ha == NODATA:
                if verbose:
                    msg = 'Setting nodata value %d to 0 at time = %f, ' \
                          'point = %d' % (ha, times[j], i)
                    log.critical(msg)
                ha = 0.0
                ua = 0.0
                va = 0.0

            w = zscale*ha + mean_stage
            h = w - elevation[i]
            stage[j,i] = w

            xmomentum[j,i] = ua * h
            ymomentum[j,i] = -va * h # South is positive in mux files

    outfile.close()