Changeset 7759


Timestamp:
May 31, 2010, 4:58:56 PM
Author:
hudson
Message:

Cut out file conversion routines from data_manager.

Location:
trunk/anuga_core/source/anuga/shallow_water
Files:
1 deleted
2 edited

  • trunk/anuga_core/source/anuga/shallow_water/data_manager.py

    r7751 r7759  
    365 365    infile.close()
    366 366    outfile.close()
    367 
    368 
    369 
    370 ##
    371 # @brief  Return block of surface lines for each cross section
    372 # @param lines Iterable of text lines to process.
    373 # @note BROKEN?  UNUSED?
    374 def _read_hecras_cross_sections(lines):
    375     """Return block of surface lines for each cross section
    376     Starts with SURFACE LINE,
    377     Ends with END CROSS-SECTION
    378     """
    379 
    380     points = []
    381 
    382     reading_surface = False
    383     for i, line in enumerate(lines):
    384         if len(line.strip()) == 0:    #Ignore blanks
    385             continue
    386 
    387         if line.strip().startswith('SURFACE LINE'):
    388             reading_surface = True
    389             continue
    390 
    391         if line.strip().startswith('END') and reading_surface:
    392             yield points
    393             reading_surface = False
    394             points = []
    395 
    396         if reading_surface:
    397             fields = line.strip().split(',')
    398             easting = float(fields[0])
    399             northing = float(fields[1])
    400             elevation = float(fields[2])
    401             points.append([easting, northing, elevation])
    402 
    403 
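For illustration, here is a minimal sketch (Python 2, like the surrounding code) of driving the generator above on its own; the sample lines are made up, mimicking one SURFACE LINE block of an .sdf file:

    # Hypothetical usage of _read_hecras_cross_sections.
    sample = ['SURFACE LINE:',
              ' 405548.67,   6438142.76,   35.37',
              ' 405552.24,   6438146.28,   35.41',
              'END CROSS-SECTION']
    for block in _read_hecras_cross_sections(sample):
        print block    # [[405548.67, 6438142.76, 35.37], [405552.24, ...]]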
    404 ##
    405 # @brief Convert HECRAS (.sdf) file to PTS file.
    406 # @param basename_in Stem of input filename.
    407 # @param basename_out Stem of output filename.
    408 # @param verbose True if this function is to be verbose.
    409 def hecras_cross_sections2pts(basename_in,
    410                               basename_out=None,
    411                               verbose=False):
    412     """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
    413 
    414     Example:
    415 
    416 # RAS export file created on Mon 15Aug2005 11:42
    417 # by HEC-RAS Version 3.1.1
    418 
    419 BEGIN HEADER:
    420   UNITS: METRIC
    421   DTM TYPE: TIN
    422   DTM: v:\1\cit\perth_topo\river_tin
    423   STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
    424   CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
    425   MAP PROJECTION: UTM
    426   PROJECTION ZONE: 50
    427   DATUM: AGD66
    428   VERTICAL DATUM:
    429   NUMBER OF REACHES:  19
    430   NUMBER OF CROSS-SECTIONS:  14206
    431 END HEADER:
    432 
    433 Only the SURFACE LINE data of the following form will be utilised
    434   CROSS-SECTION:
    435     STREAM ID:Southern-Wungong
    436     REACH ID:Southern-Wungong
    437     STATION:19040.*
    438     CUT LINE:
    439       405548.671603161 , 6438142.7594925
    440       405734.536092045 , 6438326.10404912
    441       405745.130459356 , 6438331.48627354
    442       405813.89633823 , 6438368.6272789
    443     SURFACE LINE:
    444      405548.67,   6438142.76,   35.37
    445      405552.24,   6438146.28,   35.41
    446      405554.78,   6438148.78,   35.44
    447      405555.80,   6438149.79,   35.44
    448      405559.37,   6438153.31,   35.45
    449      405560.88,   6438154.81,   35.44
    450      405562.93,   6438156.83,   35.42
    451      405566.50,   6438160.35,   35.38
    452      405566.99,   6438160.83,   35.37
    453      ...
    454    END CROSS-SECTION
    455 
    456     Convert to NetCDF pts format which is
    457 
    458     points:  (Nx2) float array
    459     elevation: N float array
    460     """
    461 
    462     import os
    463     from Scientific.IO.NetCDF import NetCDFFile
    464 
    465     root = basename_in
    466 
    467     # Get ASCII file
    468     infile = open(root + '.sdf', 'r')
    469 
    470     if verbose: log.critical('Reading DEM from %s' % (root + '.sdf'))
    471 
    472     lines = infile.readlines()
    473     infile.close()
    474 
    475     if verbose: log.critical('Converting to pts format')
    476 
    477     # Scan through the header, picking up stuff we need.
    478     i = 0
    479     while lines[i].strip() == '' or lines[i].strip().startswith('#'):
    480         i += 1
    481 
    482     assert lines[i].strip().upper() == 'BEGIN HEADER:'
    483     i += 1
    484 
    485     assert lines[i].strip().upper().startswith('UNITS:')
    486     units = lines[i].strip().split()[1]
    487     i += 1
    488 
    489     assert lines[i].strip().upper().startswith('DTM TYPE:')
    490     i += 1
    491 
    492     assert lines[i].strip().upper().startswith('DTM:')
    493     i += 1
    494 
    495     assert lines[i].strip().upper().startswith('STREAM')
    496     i += 1
    497 
    498     assert lines[i].strip().upper().startswith('CROSS')
    499     i += 1
    500 
    501     assert lines[i].strip().upper().startswith('MAP PROJECTION:')
    502     projection = lines[i].strip().split(':')[1]
    503     i += 1
    504 
    505     assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
    506     zone = int(lines[i].strip().split(':')[1])
    507     i += 1
    508 
    509     assert lines[i].strip().upper().startswith('DATUM:')
    510     datum = lines[i].strip().split(':')[1]
    511     i += 1
    512 
    513     assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
    514     i += 1
    515 
    516     assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
    517     i += 1
    518 
    519     assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
    520     number_of_cross_sections = int(lines[i].strip().split(':')[1])
    521     i += 1
    522 
    523     # Now read all points
    524     points = []
    525     elevation = []
    526     for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
    527         for k, entry in enumerate(entries):
    528             points.append(entry[:2])
    529             elevation.append(entry[2])
    530 
    531     msg = 'Actual number_of_cross_sections == %d, reported as %d'\
    532           %(j+1, number_of_cross_sections)
    533     assert j+1 == number_of_cross_sections, msg
    534 
    535     # Get output file, write PTS data
    536     if basename_out is None:
    537         ptsname = root + '.pts'
    538     else:
    539         ptsname = basename_out + '.pts'
    540 
    541     geo_ref = Geo_reference(zone, 0, 0, datum, projection, units)
    542     geo = Geospatial_data(points, {"elevation":elevation},
    543                           verbose=verbose, geo_reference=geo_ref)
    544     geo.export_points_file(ptsname)
    545 367
    546 368
     
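As a usage sketch (the file stems are illustrative only), the routine reads <stem>.sdf and writes a PTS file:

    # Hypothetical call: reads 'river.sdf' and writes 'river_elevation.pts'.
    hecras_cross_sections2pts('river', basename_out='river_elevation',
                              verbose=True)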
    729 551
    730 552
    731 ##
    732 # @brief Convert SWW file to PTS file (at selected points).
    733 # @param basename_in Stem name of input SWW file.
    734 # @param basename_out Stem name of output file.
    735 # @param data_points If given, points where quantity is to be computed.
    736 # @param quantity Name (or expression) of existing quantity(s) (def: elevation).
    737 # @param timestep If given, output quantity at that timestep.
    738 # @param reduction If given, function used to reduce quantity over all timesteps.
    739 # @param NODATA_value The NODATA value (default -9999).
    740 # @param verbose True if this function is to be verbose.
    741 # @param origin ??
    742 def sww2pts(basename_in, basename_out=None,
    743             data_points=None,
    744             quantity=None,
    745             timestep=None,
    746             reduction=None,
    747             NODATA_value=-9999,
    748             verbose=False,
    749             origin=None):
    750     """Read SWW file and convert to interpolated values at selected points
    751 
    752     The parameter 'quantity' must be the name of an existing quantity or
    753     an expression involving existing quantities. The default is 'elevation'.
    754 
    755     if timestep (an index) is given, output quantity at that timestep.
    756 
    757     if reduction is given use that to reduce quantity over all timesteps.
    758 
    759     data_points (Nx2 array) gives locations of points where quantity is to
    760     be computed.
    761     """
    762 
    763     import sys
    764     from anuga.geometry.polygon import inside_polygon, outside_polygon
    765     from anuga.abstract_2d_finite_volumes.util import \
    766              apply_expression_to_dictionary
    767     from anuga.geospatial_data.geospatial_data import Geospatial_data
    768 
    769     if quantity is None:
    770         quantity = 'elevation'
    771 
    772     if reduction is None:
    773         reduction = max
    774 
    775     if basename_out is None:
    776         basename_out = basename_in + '_%s' % quantity
    777 
    778     swwfile = basename_in + '.sww'
    779     ptsfile = basename_out + '.pts'
    780 
    781     # Read sww file
    782     if verbose: log.critical('Reading from %s' % swwfile)
    783     from Scientific.IO.NetCDF import NetCDFFile
    784     fid = NetCDFFile(swwfile)
    785 
    786     # Get extent and reference
    787     x = fid.variables['x'][:]
    788     y = fid.variables['y'][:]
    789     volumes = fid.variables['volumes'][:]
    790 
    791     number_of_timesteps = fid.dimensions['number_of_timesteps']
    792     number_of_points = fid.dimensions['number_of_points']
    793     if origin is None:
    794         # Get geo_reference
    795         # sww files don't have to have a geo_ref
    796         try:
    797             geo_reference = Geo_reference(NetCDFObject=fid)
    798         except AttributeError, e:
    799             geo_reference = Geo_reference() # Default georef object
    800 
    801         xllcorner = geo_reference.get_xllcorner()
    802         yllcorner = geo_reference.get_yllcorner()
    803         zone = geo_reference.get_zone()
    804     else:
    805         zone = origin[0]
    806         xllcorner = origin[1]
    807         yllcorner = origin[2]
    808 
    809     # FIXME: Refactor using code from file_function.statistics
    810     # Something like print swwstats(swwname)
    811     if verbose:
    812         x = fid.variables['x'][:]
    813         y = fid.variables['y'][:]
    814         times = fid.variables['time'][:]
    815         log.critical('------------------------------------------------')
    816         log.critical('Statistics of SWW file:')
    817         log.critical('  Name: %s' % swwfile)
    818         log.critical('  Reference:')
    819         log.critical('    Lower left corner: [%f, %f]' % (xllcorner, yllcorner))
    820         log.critical('    Start time: %f' % fid.starttime[0])
    821         log.critical('  Extent:')
    822         log.critical('    x [m] in [%f, %f], len(x) == %d'
    823                      % (num.min(x), num.max(x), len(x.flat)))
    824         log.critical('    y [m] in [%f, %f], len(y) == %d'
    825                      % (num.min(y), num.max(y), len(y.flat)))
    826         log.critical('    t [s] in [%f, %f], len(t) == %d'
    827                      % (min(times), max(times), len(times)))
    828         log.critical('  Quantities [SI units]:')
    829         for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
    830             q = fid.variables[name][:].flat
    831             log.critical('    %s in [%f, %f]' % (name, min(q), max(q)))
    832 
    833     # Get quantity and reduce if applicable
    834     if verbose: log.critical('Processing quantity %s' % quantity)
    835 
    836     # Turn NetCDF objects into numeric arrays
    837     quantity_dict = {}
    838     for name in fid.variables.keys():
    839         quantity_dict[name] = fid.variables[name][:]
    840 
    841     # Convert quantity expression to quantities found in sww file
    842     q = apply_expression_to_dictionary(quantity, quantity_dict)
    843 
    844     if len(q.shape) == 2:
    845         # q has a time component and needs to be reduced along
    846         # the temporal dimension
    847         if verbose: log.critical('Reducing quantity %s' % quantity)
    848 
    849         q_reduced = num.zeros(number_of_points, num.float)
    850         for k in range(number_of_points):
    851             q_reduced[k] = reduction(q[:,k])
    852         q = q_reduced
    853 
    854     # Post condition: Now q has dimension: number_of_points
    855     assert len(q.shape) == 1
    856     assert q.shape[0] == number_of_points
    857 
    858     if verbose:
    859         log.critical('Processed values for %s are in [%f, %f]'
    860                      % (quantity, min(q), max(q)))
    861 
    862     # Create grid and update xll/yll corner and x,y
    863     vertex_points = num.concatenate((x[:, num.newaxis], y[:, num.newaxis]), axis=1)
    864     assert len(vertex_points.shape) == 2
    865 
    866     # Interpolate
    867     from anuga.fit_interpolate.interpolate import Interpolate
    868     interp = Interpolate(vertex_points, volumes, verbose=verbose)
    869 
    870     # Interpolate using quantity values
    871     if verbose: log.critical('Interpolating')
    872     interpolated_values = interp.interpolate(q, data_points).flatten()
    873 
    874     if verbose:
    875         log.critical('Interpolated values are in [%f, %f]'
    876                      % (num.min(interpolated_values),
    877                         num.max(interpolated_values)))
    878 
    879     # Assign NODATA_value to all points outside bounding polygon
    880     # (from interpolation mesh)
    881     P = interp.mesh.get_boundary_polygon()
    882     outside_indices = outside_polygon(data_points, P, closed=True)
    883 
    884     for i in outside_indices:
    885         interpolated_values[i] = NODATA_value
    886 
    887     # Store results
    888     G = Geospatial_data(data_points=data_points, attributes=interpolated_values)
    889 
    890     G.export_points_file(ptsfile, absolute = True)
    891 
    892     fid.close()
    893 
    894 
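A minimal usage sketch for sww2pts; the SWW stem and gauge coordinates are made up:

    import numpy as num

    # Hypothetical: peak stage at two gauge locations from run1.sww.
    # With basename_out omitted the result goes to run1_stage.pts.
    gauges = num.array([[250000.0, 7650000.0],
                        [250100.0, 7650050.0]])
    sww2pts('run1', data_points=gauges, quantity='stage',
            reduction=max, verbose=True)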
    895 553
    896 554 ##
     
    933 591 # @param very_verbose
    934 592 # @return
    935 def sww2domain(filename, boundary=None, t=None,
     593 def load_sww_as_domain(filename, boundary=None, t=None,
    936 594               fail_if_NaN=True, NaN_filler=0,
    937 595               verbose=False, very_verbose=False):
     
    1882 1540    return d <= max_distance
    1883 1541
    1884 ################################################################################
    1885 # CONVERTING UNGRIDDED URS DATA TO AN SWW FILE
    1886 ################################################################################
    1887 
    1888 WAVEHEIGHT_MUX_LABEL = '-z-mux'
    1889 EAST_VELOCITY_LABEL =  '-e-mux'
    1890 NORTH_VELOCITY_LABEL =  '-n-mux'
    1891 
    1892 ##
    1893 # @brief Convert URS file(s) (wave prop) to an SWW file.
    1894 # @param basename_in Stem of the input filenames.
    1895 # @param basename_out Path to the output SWW file.
    1896 # @param verbose True if this function is to be verbose.
    1897 # @param mint
    1898 # @param maxt
    1899 # @param mean_stage
    1900 # @param origin Tuple with geo-ref UTM coordinates (zone, easting, northing).
    1901 # @param hole_points_UTM
    1902 # @param zscale
    1903 # @note Also convert latitude and longitude to UTM. All coordinates are
    1904 #       assumed to be given in the GDA94 datum.
    1905 # @note Input filename stem has suffixes '-z-mux', '-e-mux' and '-n-mux'
    1906 #       added for relative height, x-velocity and y-velocity respectively.
    1907 def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
    1908                       mint=None, maxt=None,
    1909                       mean_stage=0,
    1910                       origin=None,
    1911                       hole_points_UTM=None,
    1912                       zscale=1):
    1913     """
    1914     Convert URS C binary format for wave propagation to
    1915     sww format native to abstract_2d_finite_volumes.
    1916 
    1917     Specify only basename_in and read files of the form
    1918     basefilename-z-mux, basefilename-e-mux and
    1919     basefilename-n-mux containing relative height,
    1920     x-velocity and y-velocity, respectively.
    1921 
    1922     Also convert latitude and longitude to UTM. All coordinates are
    1923     assumed to be given in the GDA94 datum. The latitude and longitude
    1924     information is assumed to be ungridded, i.e. not on a regular grid.
    1925 
    1926     mint and maxt: if omitted, the full time extent is used.
    1927     To include a value, mint and maxt may equal it.
    1928     Lat and lon are assumed to be in decimal degrees.
    1929 
    1930     origin is a 3-tuple with geo referenced
    1931     UTM coordinates (zone, easting, northing)
    1932     It will be the origin of the sww file. This shouldn't be used,
    1933     since all of anuga should be able to handle an arbitrary origin.
    1934     The mux point info is NOT relative to this origin.
    1935 
    1936     URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    1937     which means that latitude is the fastest
    1938     varying dimension (row major order, so to speak)
    1939 
    1940     In URS C binary the latitudes and longitudes are in ascending order.
    1941 
    1942     Note, interpolations of the resulting sww file will be different
    1943     from results of urs2sww.  This is due to the interpolation
    1944     function used, and the different grid structure between urs2sww
    1945     and this function.
    1946 
    1947     Interpolating data that has an underlying gridded source can
    1948     easily end up with different values, depending on the underlying
    1949     mesh.
    1950 
    1951     consider these 4 points
    1952     50  -50
    1953 
    1954     0     0
    1955 
    1956     The grid can be
    1957      -
    1958     |\|   A
    1959      -
    1960      or:
    1961       -
    1962      |/|  B
    1963       -
    1964 
    1965     If a point is just below the centre of the cell, it will have a
    1966     +ve value in grid A and a -ve value in grid B.
    1967     """
    1968 
    1969     from anuga.mesh_engine.mesh_engine import NoTrianglesError
    1970     from anuga.pmesh.mesh import Mesh
    1971 
    1972     files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
    1973                 basename_in + EAST_VELOCITY_LABEL,
    1974                 basename_in + NORTH_VELOCITY_LABEL]
    1975     quantities = ['HA','UA','VA']
    1976 
    1977     # instantiate urs_points of the three mux files.
    1978     mux = {}
    1979     for quantity, file in map(None, quantities, files_in):
    1980         mux[quantity] = Urs_points(file)
    1981 
    1982     # Could check that the depth is the same. (hashing)
    1983 
    1984     # handle to a mux file to do depth stuff
    1985     a_mux = mux[quantities[0]]
    1986 
    1987     # Convert to utm
    1988     lat = a_mux.lonlatdep[:,1]
    1989     long = a_mux.lonlatdep[:,0]
    1990     points_utm, zone = convert_from_latlon_to_utm(latitudes=lat,
    1991                                                   longitudes=long)
    1992 
    1993     elevation = a_mux.lonlatdep[:,2] * -1
    1994 
    1995     # grid (create a mesh from the selected points)
    1996     # This mesh has a problem.  Triangles are stretched over ungridded areas.
    1997     # If these areas could be described as holes in pmesh, that would be great.
    1998 
    1999     # I can't just get the user to select a point in the middle.
    2000     # A boundary is needed around these points.
    2001     # But if the zone of points is obvious enough auto-segment should do
    2002     # a good boundary.
    2003     mesh = Mesh()
    2004     mesh.add_vertices(points_utm)
    2005     mesh.auto_segment(smooth_indents=True, expand_pinch=True)
    2006 
    2007     # To try and avoid alpha shape 'hugging' too much
    2008     mesh.auto_segment(mesh.shape.get_alpha() * 1.1)
    2009     if hole_points_UTM is not None:
    2010         point = ensure_absolute(hole_points_UTM)
    2011         mesh.add_hole(point[0], point[1])
    2012 
    2013     try:
    2014         mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
    2015     except NoTrianglesError:
    2016         # This is a bit of a hack, going in and changing the data structure.
    2017         mesh.holes = []
    2018         mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
    2019 
    2020     mesh_dic = mesh.Mesh2MeshList()
    2021 
    2022     #mesh.export_mesh_file(basename_in + '_168.tsh')
    2023     #import sys; sys.exit()
    2024     # These are the times of the mux file
    2025     mux_times = []
    2026     for i in range(a_mux.time_step_count):
    2027         mux_times.append(a_mux.time_step * i)
    2028     (mux_times_start_i, mux_times_fin_i) = mux2sww_time(mux_times, mint, maxt)
    2029     times = mux_times[mux_times_start_i:mux_times_fin_i]
    2030 
    2031     if mux_times_start_i == mux_times_fin_i:
    2032         # Close the mux files
    2033         for quantity, file in map(None, quantities, files_in):
    2034             mux[quantity].close()
    2035         msg = "Due to mint and maxt there's no time info in the boundary SWW."
    2036         raise Exception, msg
    2037 
    2038     # If this raise is removed there are currently no downstream errors
    2039 
    2040     points_utm=ensure_numeric(points_utm)
    2041     assert num.alltrue(ensure_numeric(mesh_dic['generatedpointlist'])
    2042                        == ensure_numeric(points_utm))
    2043 
    2044     volumes = mesh_dic['generatedtrianglelist']
    2045 
    2046     # Write sww intro and grid stuff.
    2047     if basename_out is None:
    2048         swwname = basename_in + '.sww'
    2049     else:
    2050         swwname = basename_out + '.sww'
    2051 
    2052     if verbose: log.critical('Output to %s' % swwname)
    2053 
    2054     outfile = NetCDFFile(swwname, netcdf_mode_w)
    2055 
    2056     # For a different way of doing this, check out tsh2sww
    2057     # work out sww_times and the index range this covers
    2058     sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
    2059     sww.store_header(outfile, times, len(volumes), len(points_utm),
    2060                      verbose=verbose, sww_precision=netcdf_float)
    2061     outfile.mean_stage = mean_stage
    2062     outfile.zscale = zscale
    2063 
    2064     sww.store_triangulation(outfile, points_utm, volumes,
    2065                             zone, 
    2066                             new_origin=origin,
    2067                             verbose=verbose)
    2068     sww.store_static_quantities(outfile, elevation=elevation)
    2069 
    2070     if verbose: log.critical('Converting quantities')
    2071 
    2072     # Read in a time slice from each mux file and write it to the SWW file
    2073     j = 0
    2074     for ha, ua, va in map(None, mux['HA'], mux['UA'], mux['VA']):
    2075         if j >= mux_times_start_i and j < mux_times_fin_i:
    2076             stage = zscale*ha + mean_stage
    2077             h = stage - elevation
    2078             xmomentum = ua*h
    2079             ymomentum = -1 * va * h # -1 since in mux files south is positive.
    2080             sww.store_quantities(outfile,
    2081                                  slice_index=j-mux_times_start_i,
    2082                                  verbose=verbose,
    2083                                  stage=stage,
    2084                                  xmomentum=xmomentum,
    2085                                  ymomentum=ymomentum,
    2086                                  sww_precision=num.float)
    2087         j += 1
    2088 
    2089     if verbose: sww.verbose_quantities(outfile)
    2090 
    2091     outfile.close()
    2092 
    2093     # Do some conversions while writing the sww file
    2094 1542
    2095 1543
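A usage sketch (the 'proj' stem is illustrative): the three files proj-z-mux, proj-e-mux and proj-n-mux are combined into a single SWW file:

    # Hypothetical conversion of ungridded URS data to proj_boundary.sww,
    # keeping only times in [0, 3600] seconds (bounds are inclusive).
    urs_ungridded2sww(basename_in='proj', basename_out='proj_boundary',
                      mint=0.0, maxt=3600.0, mean_stage=0.0, zscale=1)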
     
    2214 1662 # @param maxt ??
    2215 1663 # @return ??
    2216 def mux2sww_time(mux_times, mint, maxt):
     1664 def read_time_from_mux(mux_times, mint, maxt):
    2217 1665     """
    2218 1666     """
     
    2233 1681
    2234 1682
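The body of read_time_from_mux is not shown in this hunk. Judging only from its call site in urs_ungridded2sww, which expects (mux_times_start_i, mux_times_fin_i), a plausible sketch would be:

    # Assumed behaviour only; not the actual implementation.
    def read_time_from_mux_sketch(mux_times, mint, maxt):
        """Return (start_i, fin_i) so that mux_times[start_i:fin_i] lies
        within [mint, maxt]; bounds inclusive, matching the note in
        urs_ungridded2sww that mint and maxt may equal a stored time."""
        start_i, fin_i = 0, len(mux_times)
        if mint is not None:
            while start_i < fin_i and mux_times[start_i] < mint:
                start_i += 1
        if maxt is not None:
            while fin_i > start_i and mux_times[fin_i - 1] > maxt:
                fin_i -= 1
        return start_i, fin_i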
    2235 ##
    2236 # @brief Convert a URS (mux2, wave propagation) file to an STS file.
    2237 # @param basename_in String (or list) of source file stems.
    2238 # @param basename_out Stem of output STS file path.
    2239 # @param weights
    2240 # @param verbose True if this function is to be verbose.
    2241 # @param origin Tuple with geo-ref UTM coords (zone, easting, northing).
    2242 # @param zone
    2243 # @param mean_stage
    2244 # @param zscale
    2245 # @param ordering_filename Path of a file specifying which mux2 gauge points are
    2246 #                          to be stored.
    2247 # @note Also convert latitude and longitude to UTM. All coordinates are
    2248 #       assumed to be given in the GDA94 datum.
    2249 def urs2sts(basename_in, basename_out=None,
    2250             weights=None,
    2251             verbose=False,
    2252             origin=None,
    2253             zone=None,
    2254             central_meridian=None,           
    2255             mean_stage=0.0,
    2256             zscale=1.0,
    2257             ordering_filename=None):
    2258     """Convert URS mux2 format for wave propagation to sts format
    2259 
    2260     Also convert latitude and longitude to UTM. All coordinates are
    2261     assumed to be given in the GDA94 datum
    2262 
    2263     origin is a 3-tuple with geo referenced
    2264     UTM coordinates (zone, easting, northing)
    2265 
    2266     inputs:
    2267 
    2268     basename_in: list of source file prefixes
    2269 
    2270         These are combined with the extensions:
    2271         WAVEHEIGHT_MUX2_LABEL = '-z-mux2' for stage
    2272         EAST_VELOCITY_MUX2_LABEL = '-e-mux2' xmomentum
    2273         NORTH_VELOCITY_MUX2_LABEL = '-n-mux2' and ymomentum
    2274 
    2275         to create a 2D list of mux2 files. The rows are associated with each
    2276         quantity and must have the above extensions;
    2277         the columns are the list of file prefixes.
    2278 
    2279     ordering: a .txt file name specifying which mux2 gauge points are
    2280               to be stored. This is indicated by the index of the gauge
    2281               in the ordering file.
    2282 
    2283               ordering file format:
    2284               1st line:    'index,longitude,latitude\n'
    2285               other lines: index,longitude,latitude
    2286 
    2287               If ordering is None or the ordering file is empty then
    2288               all points are taken in the order they
    2289               appear in the mux2 file.
    2290 
    2291 
    2292     output:
    2293       basename_out: name of sts file in which mux2 data is stored.
    2294      
    2295      
    2296      
    2297     NOTE: South is positive in mux files so sign of y-component of velocity is reversed
    2298     """
    2299 
    2300     import os
    2301     from Scientific.IO.NetCDF import NetCDFFile
    2302     from types import ListType,StringType
    2303     from operator import __and__
    2304 
    2305     if not isinstance(basename_in, ListType):
    2306         if verbose: log.critical('Reading single source')
    2307         basename_in = [basename_in]
    2308 
    2309     # This is the value used in the mux file format to indicate NAN data
    2310     # FIXME (Ole): This should be changed everywhere to IEEE NAN when
    2311     #              we upgrade to Numpy
    2312     NODATA = 99
    2313 
    2314     # Check that basename is a list of strings
    2315     if not reduce(__and__, map(lambda z:isinstance(z,StringType), basename_in)):
    2316         msg= 'basename_in must be a string or list of strings'
    2317         raise Exception, msg
    2318 
    2319     # Find the number of sources to be used
    2320     numSrc = len(basename_in)
    2321 
    2322     # A weight must be specified for each source
    2323     if weights is None:
    2324         # Default is equal weighting
    2325         weights = num.ones(numSrc, num.float) / numSrc
    2326     else:
    2327         weights = ensure_numeric(weights)
    2328         msg = 'When combining multiple sources, a weight must be specified ' \
    2329               'for each mux2 source file'
    2330         assert len(weights) == numSrc, msg
    2331 
    2332     if verbose: log.critical('Weights used in urs2sts: %s' % str(weights))
    2333 
    2334     # Check output filename
    2335     if basename_out is None:
    2336         msg = 'STS filename must be specified as basename_out ' \
    2337               'in function urs2sts'
    2338         raise Exception, msg
    2339 
    2340     if basename_out.endswith('.sts'):
    2341         stsname = basename_out
    2342     else:
    2343         stsname = basename_out + '.sts'
    2344 
    2345     # Create input filenames from basenames and check their existence
    2346     files_in = [[], [], []]
    2347     for files in basename_in:
    2348         files_in[0].append(files + WAVEHEIGHT_MUX2_LABEL),
    2349         files_in[1].append(files + EAST_VELOCITY_MUX2_LABEL)
    2350         files_in[2].append(files + NORTH_VELOCITY_MUX2_LABEL)
    2351 
    2352     quantities = ['HA','UA','VA'] # Quantity names used in the MUX2 format
    2353     for i in range(len(quantities)):
    2354         for file_in in files_in[i]:
    2355             if (os.access(file_in, os.R_OK) == 0):
    2356                 msg = 'File %s does not exist or is not accessible' % file_in
    2357                 raise IOError, msg
    2358 
    2359     # Establish permutation array
    2360     if ordering_filename is not None:
    2361         if verbose is True: log.critical('Reading ordering file %s'
    2362                                          % ordering_filename)
    2363 
    2364         # Read ordering file
    2365         try:
    2366             fid = open(ordering_filename, 'r')
    2367             file_header = fid.readline().split(',')
    2368             ordering_lines = fid.readlines()
    2369             fid.close()
    2370         except:
    2371             msg = 'Cannot open %s' % ordering_filename
    2372             raise Exception, msg
    2373 
    2374         reference_header = 'index, longitude, latitude\n'
    2375         reference_header_split = reference_header.split(',')
    2376         for i in range(3):
    2377             if not file_header[i].strip() == reference_header_split[i].strip():
    2378                 msg = 'File must contain header: ' + reference_header
    2379                 raise Exception, msg
    2380 
    2381         if len(ordering_lines) < 2:
    2382             msg = 'File must contain at least two points'
    2383             raise Exception, msg
    2384 
    2385         permutation = [int(line.split(',')[0]) for line in ordering_lines]
    2386         permutation = ensure_numeric(permutation)
    2387     else:
    2388         permutation = None
    2389 
    2390     # Read MUX2 files
    2391     if (verbose): log.critical('reading mux2 file')
    2392 
    2393     mux={}
    2394     for i, quantity in enumerate(quantities):
    2395         # For each quantity read the associated list of source mux2 file with
    2396         # extension associated with that quantity
    2397 
    2398         times, latitudes, longitudes, elevation, mux[quantity], starttime \
    2399             = read_mux2_py(files_in[i], weights, permutation, verbose=verbose)
    2400 
    2401         # Check that all quantities have consistent time and space information
    2402         if quantity != quantities[0]:
    2403             msg = '%s, %s and %s have inconsistent gauge data' \
    2404                   % (files_in[0], files_in[1], files_in[2])
    2405             assert num.allclose(times, times_old), msg
    2406             assert num.allclose(latitudes, latitudes_old), msg
    2407             assert num.allclose(longitudes, longitudes_old), msg
    2408             assert num.allclose(elevation, elevation_old), msg
    2409             assert num.allclose(starttime, starttime_old), msg
    2410         times_old = times
    2411         latitudes_old = latitudes
    2412         longitudes_old = longitudes
    2413         elevation_old = elevation
    2414         starttime_old = starttime
    2415 
    2416         # Self check - can be removed to improve speed
    2417         #ref_longitudes = [float(line.split(',')[1]) for line in ordering_lines]
    2418         #ref_latitudes = [float(line.split(',')[2]) for line in ordering_lines]
    2419         #
    2420         #msg = 'Longitudes specified in ordering file do not match those ' \
    2421         #      'found in mux files. ' \
    2422         #      'I got %s instead of %s (only beginning shown)' \
    2423         #      % (str(longitudes[:10]) + '...',
    2424         #         str(ref_longitudes[:10]) + '...')
    2425         #assert allclose(longitudes, ref_longitudes), msg
    2426         #
    2427         #msg = 'Latitudes specified in ordering file do not match those ' \
    2428         #      'found in mux files. '
    2429         #      'I got %s instead of %s (only beginning shown)' \
    2430         #      % (str(latitudes[:10]) + '...',
    2431         #         str(ref_latitudes[:10]) + '...')
    2432         #assert allclose(latitudes, ref_latitudes), msg
    2433 
    2434     # Store timeseries in STS file
    2435     msg = 'File is empty and/or clipped region not in file region'
    2436     assert len(latitudes) > 0, msg
    2437 
    2438     number_of_points = latitudes.shape[0]      # Number of stations retrieved
    2439     number_of_times = times.shape[0]           # Number of timesteps
    2440     number_of_latitudes = latitudes.shape[0]   # Number latitudes
    2441     number_of_longitudes = longitudes.shape[0] # Number longitudes
    2442 
    2443     # The permutation vector contains the original indices
    2444     # as given in ordering file or None in which case points
    2445     # are assigned the trivial indices enumerating them from
    2446     # 0 to number_of_points-1
    2447     if permutation is None:
    2448         permutation = num.arange(number_of_points, dtype=num.int)
    2449 
    2450     # NetCDF file definition
    2451     outfile = NetCDFFile(stsname, netcdf_mode_w)
    2452 
    2453     description = 'Converted from URS mux2 files: %s' % basename_in
    2454 
    2455     # Create new file
    2456     sts = Write_sts()
    2457     sts.store_header(outfile,
    2458                      times+starttime,
    2459                      number_of_points,
    2460                      description=description,
    2461                      verbose=verbose,
    2462                      sts_precision=netcdf_float)
    2463 
    2464     # Store
    2465     from anuga.coordinate_transforms.redfearn import redfearn
    2466 
    2467     x = num.zeros(number_of_points, num.float)  # Easting
    2468     y = num.zeros(number_of_points, num.float)  # Northing
    2469 
    2470     # Check zone boundaries
    2471     if zone is None:
    2472         refzone, _, _ = redfearn(latitudes[0], longitudes[0],
    2473                                  central_meridian=central_meridian)
    2474     else:
    2475         refzone = zone
    2476 
    2477     old_zone = refzone
    2478 
    2479     for i in range(number_of_points):
    2480         computed_zone, easting, northing = redfearn(latitudes[i], longitudes[i],
    2481                                                     zone=zone,
    2482                                                     central_meridian=central_meridian)
    2483         x[i] = easting
    2484         y[i] = northing
    2485         if computed_zone != refzone:
    2486             msg = 'All sts gauges need to be in the same zone. \n'
    2487             msg += 'offending gauge:Zone %d,%.4f, %4f\n' \
    2488                    % (computed_zone, easting, northing)
    2489             msg += 'previous gauge:Zone %d,%.4f, %4f' \
    2490                    % (old_zone, old_easting, old_northing)
    2491             raise Exception, msg
    2492         old_zone = computed_zone
    2493         old_easting = easting
    2494         old_northing = northing
    2495 
    2496     if origin is None:
    2497         origin = Geo_reference(refzone, min(x), min(y))
    2498     geo_ref = write_NetCDF_georeference(origin, outfile)
    2499 
    2500     elevation = num.resize(elevation, outfile.variables['elevation'][:].shape)
    2501     outfile.variables['permutation'][:] = permutation.astype(num.int32) # Opteron 64
    2502     outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    2503     outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    2504     outfile.variables['elevation'][:] = elevation
    2505 
    2506     stage = outfile.variables['stage']
    2507     xmomentum = outfile.variables['xmomentum']
    2508     ymomentum = outfile.variables['ymomentum']
    2509 
    2510     if verbose: log.critical('Converting quantities')
    2511 
    2512     for j in range(len(times)):
    2513         for i in range(number_of_points):
    2514             ha = mux['HA'][i,j]
    2515             ua = mux['UA'][i,j]
    2516             va = mux['VA'][i,j]
    2517             if ha == NODATA:
    2518                 if verbose:
    2519                     msg = 'Setting nodata value %d to 0 at time = %f, ' \
    2520                           'point = %d' % (ha, times[j], i)
    2521                     log.critical(msg)
    2522                 ha = 0.0
    2523                 ua = 0.0
    2524                 va = 0.0
    2525 
    2526             w = zscale*ha + mean_stage
    2527             h = w - elevation[i]
    2528             stage[j,i] = w
    2529 
    2530             xmomentum[j,i] = ua * h
    2531             ymomentum[j,i] = -va * h # South is positive in mux files
    2532 
    2533 
    2534     outfile.close()
    2535 
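A driving sketch for urs2sts; the source stems, weights and ordering file are made up for illustration:

    # Hypothetical ordering file 'ordering.txt' (header plus two gauges):
    #   index, longitude, latitude
    #   0, 115.25, -32.10
    #   3, 115.30, -32.15
    urs2sts(['sourceA', 'sourceB'],      # reads sourceA-z-mux2 etc.
            basename_out='gauges',
            weights=[0.7, 0.3],          # one weight per source
            ordering_filename='ordering.txt',
            mean_stage=0.0, verbose=True)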
    2536 
    2537 ##
    2538 # @brief Create a list of points defining a boundary from an STS file.
    2539 # @param stsname Stem of path to the STS file to read.
    2540 # @return A list of boundary points.
    2541 def create_sts_boundary(stsname):
    2542     """Create a list of points defining a boundary from an STS file.
    2543 
    2544     Create boundary segments from .sts file. Points can be stored in
    2545     arbitrary order within the .sts file. The order in which the .sts points
    2546     make up the boundary is given in the order.txt file
    2547 
    2548     FIXME:
    2549     Point coordinates are stored in relative eastings and northings.
    2550     But boundary is produced in absolute coordinates
    2551     """
    2552 
    2553     try:
    2554         fid = NetCDFFile(stsname + '.sts', netcdf_mode_r)
    2555     except:
    2556         msg = 'Cannot open %s' % (stsname + '.sts')
    2557         raise IOError, msg
    2558 
    2559     xllcorner = fid.xllcorner[0]
    2560     yllcorner = fid.yllcorner[0]
    2561 
    2562     #Points stored in sts file are normalised to [xllcorner,yllcorner] but
    2563     #we cannot assume that boundary polygon will be. At least the
    2564     #additional points specified by the user after this function is called
    2565     x = fid.variables['x'][:] + xllcorner
    2566     y = fid.variables['y'][:] + yllcorner
    2567 
    2568     x = num.reshape(x, (len(x),1))
    2569     y = num.reshape(y, (len(y),1))
    2570     sts_points = num.concatenate((x,y), axis=1)
    2571 
    2572     return sts_points.tolist()
    2573 
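Usage sketch (the STS stem is illustrative); the returned absolute points can seed a domain boundary polygon:

    # Hypothetical: boundary points from gauges.sts.
    boundary_points = create_sts_boundary('gauges')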
    2574 
    2575 ##
    2576 # @brief Obsolete?
    2577 # @param outfile
    2578 # @param has
    2579 # @param uas
    2580 # @param vas
    2581 # @param elevation
    2582 # @param mean_stage
    2583 # @param zscale
    2584 # @param verbose
    2585 def obsolete_write_sww_time_slices(outfile, has, uas, vas, elevation,
    2586                                    mean_stage=0, zscale=1,
    2587                                    verbose=False):
    2588     #Time stepping
    2589     stage = outfile.variables['stage']
    2590     xmomentum = outfile.variables['xmomentum']
    2591     ymomentum = outfile.variables['ymomentum']
    2592 
    2593     n = len(has)
    2594     j = 0
    2595     for ha, ua, va in map(None, has, uas, vas):
    2596         if verbose and j % ((n+10)/10) == 0: log.critical('  Doing %d of %d'
    2597                                                           % (j, n))
    2598         w = zscale*ha + mean_stage
    2599         stage[j] = w
    2600         h = w - elevation
    2601         xmomentum[j] = ua * h
    2602         ymomentum[j] = -1 * va * h  # -1 since in mux files south is positive.
    2603         j += 1
    2604 
    2605 
    2606 ##
    2607 # @brief Convert a set of URS files to a text file.
    2608 # @param basename_in Stem path to the 3 URS files.
    2609 # @param location_index ??
    2610 def urs2txt(basename_in, location_index=None):
    2611     """
    2612     Not finished or tested
    2613     """
    2614 
    2615     files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
    2616                 basename_in + EAST_VELOCITY_LABEL,
    2617                 basename_in + NORTH_VELOCITY_LABEL]
    2618     quantities = ['HA','UA','VA']
    2619 
    2620     d = ","
    2621 
    2622     # instantiate urs_points of the three mux files.
    2623     mux = {}
    2624     for quantity, file in map(None, quantities, files_in):
    2625         mux[quantity] = Urs_points(file)
    2626 
    2627     # Could check that the depth is the same. (hashing)
    2628 
    2629     # handle to a mux file to do depth stuff
    2630     a_mux = mux[quantities[0]]
    2631 
    2632     # Convert to utm
    2633     latitudes = a_mux.lonlatdep[:,1]
    2634     longitudes = a_mux.lonlatdep[:,0]
    2635     points_utm, zone = \
    2636         convert_from_latlon_to_utm(latitudes=latitudes, longitudes=longitudes)
    2637     depths = a_mux.lonlatdep[:,2]
    2638 
    2639     # open the output text file, start writing.
    2640     fid = open(basename_in + '.txt', 'w')
    2641 
    2642     fid.write("zone: " + str(zone) + "\n")
    2643 
    2644     if location_index is not None:
    2645         #Title
    2646         li = location_index
    2647         fid.write('location_index' + d + 'lat' + d + 'long' + d +
    2648                   'Easting' + d + 'Northing' + '\n')
    2649         fid.write(str(li) + d + str(latitudes[li]) + d +
    2650                   str(longitudes[li]) + d + str(points_utm[li][0]) + d +
    2651                   str(points_utm[li][1]) + '\n')
    2652 
    2653     # the non-time dependent stuff
    2654     #Title
    2655     fid.write('location_index' + d + 'lat' + d + 'long' + d +
    2656               'Easting' + d + 'Northing' + d + 'depth m' + '\n')
    2657     i = 0
    2658     for depth, point_utm, lat, long in map(None, depths, points_utm,
    2659                                            latitudes, longitudes):
    2660 
    2661         fid.write(str(i) + d + str(lat) + d + str(long) + d +
    2662                   str(point_utm[0]) + d + str(point_utm[1]) + d +
    2663                   str(depth) + '\n')
    2664         i += 1
    2665 
    2666     #Time dependent
    2667     if location_index is not None:
    2668         time_step = a_mux.time_step
    2669         i = 0
    2670         #Title
    2671         fid.write('time' + d + 'HA depth m' + d + 'UA momentum East x m/sec' +
    2672                   d + 'VA momentum North y m/sec' + '\n')
    2673         for HA, UA, VA in map(None, mux['HA'], mux['UA'], mux['VA']):
    2674             fid.write(str(i*time_step) + d + str(HA[location_index]) + d +
    2675                       str(UA[location_index]) + d +
    2676                       str(VA[location_index]) + '\n')
    2677             i += 1
    2678 
    2679 
    2680 
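Usage sketch with a made-up stem and gauge index:

    # Hypothetical: dumps proj-z-mux, proj-e-mux and proj-n-mux to
    # proj.txt, including the timeseries at gauge index 5.
    urs2txt('proj', location_index=5)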
    2681 
    2682 ##
    2683 # @brief A class to write STS files.
    2684 class Write_sts:
    2685     sts_quantities = ['stage','xmomentum','ymomentum']
    2686     RANGE = '_range'
    2687     EXTREMA = ':extrema'
    2688 
    2689     ##
    2690     # @brief Instantiate this STS writer class.
    2691     def __init__(self):
    2692         pass
    2693 
    2694     ##
    2695     # @brief Write a header to the underlying data file.
    2696     # @param outfile Handle to open file to write.
    2697     # @param times A list of the time slice times *or* a start time.
    2698     # @param number_of_points The number of URS gauge sites.
    2699     # @param description Description string to write into the STS file.
    2700     # @param sts_precision Format of data to write (netcdf constant ONLY).
    2701     # @param verbose True if this function is to be verbose.
    2702     # @note If 'times' is a list, the info will be made relative.
    2703     def store_header(self,
    2704                      outfile,
    2705                      times,
    2706                      number_of_points,
    2707                      description='Converted from URS mux2 format',
    2708                      sts_precision=netcdf_float32,
    2709                      verbose=False):
    2710         """
    2711         outfile - handle to the open NetCDF file that will be written
    2712         times - A list of the time slice times OR a start time
    2713         Note, if a list is given the info will be made relative.
    2714         number_of_points - the number of URS gauge sites
    2715         """
    2716 
    2717         outfile.institution = 'Geoscience Australia'
    2718         outfile.description = description
    2719 
    2720         try:
    2721             revision_number = get_revision_number()
    2722         except:
    2723             revision_number = None
    2724 
    2725         # Allow None to be stored as a string
    2726         outfile.revision_number = str(revision_number)
    2727 
    2728         # Start time in seconds since the epoch (midnight 1/1/1970)
    2729         # This is used to separate a single start time from a list of times,
    2730         # i.e. it distinguishes lists and numeric arrays from scalars.
    2731         if isinstance(times, (list, num.ndarray)):
    2732             number_of_times = len(times)
    2733             times = ensure_numeric(times)
    2734             if number_of_times == 0:
    2735                 starttime = 0
    2736             else:
    2737                 starttime = times[0]
    2738                 times = times - starttime  #Store relative times
    2739         else:
    2740             number_of_times = 0
    2741             starttime = times
    2742 
    2743         outfile.starttime = starttime
    2744 
    2745         # Dimension definitions
    2746         outfile.createDimension('number_of_points', number_of_points)
    2747         outfile.createDimension('number_of_timesteps', number_of_times)
    2748         outfile.createDimension('numbers_in_range', 2)
    2749 
    2750         # Variable definitions
    2751         outfile.createVariable('permutation', netcdf_int, ('number_of_points',))
    2752         outfile.createVariable('x', sts_precision, ('number_of_points',))
    2753         outfile.createVariable('y', sts_precision, ('number_of_points',))
    2754         outfile.createVariable('elevation',sts_precision, ('number_of_points',))
    2755 
    2756         q = 'elevation'
    2757         outfile.createVariable(q + Write_sts.RANGE, sts_precision,
    2758                                ('numbers_in_range',))
    2759 
    2760         # Initialise ranges with small and large sentinels.
    2761         # If this was in pure Python we could have used None sensibly
    2762         outfile.variables[q + Write_sts.RANGE][0] = max_float  # Min
    2763         outfile.variables[q + Write_sts.RANGE][1] = -max_float # Max
    2764 
    2765         # Doing sts_precision instead of Float gives cast errors.
    2766         outfile.createVariable('time', netcdf_float, ('number_of_timesteps',))
    2767 
    2768         for q in Write_sts.sts_quantities:
    2769             outfile.createVariable(q, sts_precision, ('number_of_timesteps',
    2770                                                       'number_of_points'))
    2771             outfile.createVariable(q + Write_sts.RANGE, sts_precision,
    2772                                    ('numbers_in_range',))
    2773             # Initialise ranges with small and large sentinels.
    2774             # If this was in pure Python we could have used None sensibly
    2775             outfile.variables[q + Write_sts.RANGE][0] = max_float  # Min
    2776             outfile.variables[q + Write_sts.RANGE][1] = -max_float # Max
    2777 
    2778         if isinstance(times, (list, num.ndarray)):
    2779             outfile.variables['time'][:] = times    #Store time relative
    2780 
    2781         if verbose:
    2782             log.critical('------------------------------------------------')
    2783             log.critical('Statistics:')
    2784             log.critical('    t in [%f, %f], len(t) == %d'
    2785                          % (num.min(times), num.max(times), len(times.flat)))
    2786 
    2787     ##
    2788     # @brief
    2789     # @param outfile
    2790     # @param points_utm
    2791     # @param elevation
    2792     # @param zone
    2793     # @param new_origin
    2794     # @param points_georeference
    2795     # @param verbose True if this function is to be verbose.
    2796     def store_points(self,
    2797                      outfile,
    2798                      points_utm,
    2799                      elevation, zone=None, new_origin=None,
    2800                      points_georeference=None, verbose=False):
    2801 
    2802         """
    2803         points_utm - currently a list or array of the points in UTM.
    2804         points_georeference - the georeference of the points_utm
    2805 
    2806         How about passing new_origin and current_origin.
    2807         If you get both, do a conversion from the old to the new.
    2808 
    2809         If you only get new_origin, the points are absolute,
    2810         convert to relative
    2811 
    2812         if you only get the current_origin the points are relative, store
    2813         as relative.
    2814 
    2815         if you get no georefs create a new georef based on the minimums of
    2816         points_utm.  (Another option would be to default to absolute)
    2817 
    2818         Yes, and this is done in another part of the code.
    2819         Probably geospatial.
    2820 
    2821         If you don't supply either geo_ref, then supply a zone; if not,
    2822         the default zone will be used.
    2823 
    2824         precondition:
    2825              header has been called.
    2826         """
    2827 
    2828         number_of_points = len(points_utm)
    2829         points_utm = num.array(points_utm)
    2830 
    2831         # given the two geo_refs and the points, do the stuff
    2832         # described in the method header
    2833         points_georeference = ensure_geo_reference(points_georeference)
    2834         new_origin = ensure_geo_reference(new_origin)
    2835 
    2836         if new_origin is None and points_georeference is not None:
    2837             points = points_utm
    2838             geo_ref = points_georeference
    2839         else:
    2840             if new_origin is None:
    2841                 new_origin = Geo_reference(zone, min(points_utm[:,0]),
    2842                                                  min(points_utm[:,1]))
    2843             points = new_origin.change_points_geo_ref(points_utm,
    2844                                                       points_georeference)
    2845             geo_ref = new_origin
    2846 
    2847         # At this stage I need a georef and points
    2848         # the points are relative to the georef
    2849         geo_ref.write_NetCDF(outfile)
    2850 
    2851         x = points[:,0]
    2852         y = points[:,1]
    2853         z = outfile.variables['elevation'][:]
    2854 
    2855         if verbose:
    2856             log.critical('------------------------------------------------')
    2857             log.critical('More Statistics:')
    2858             log.critical('  Extent (/lon):')
    2859             log.critical('    x in [%f, %f], len(lat) == %d'
    2860                          % (min(x), max(x), len(x)))
    2861             log.critical('    y in [%f, %f], len(lon) == %d'
    2862                          % (min(y), max(y), len(y)))
    2863             log.critical('    z in [%f, %f], len(z) == %d'
    2864                          % (min(elevation), max(elevation), len(elevation)))
    2865             log.critical('geo_ref: %s' % str(geo_ref))
    2866             log.critical('------------------------------------------------')
    2867 
    2868         #z = resize(bath_grid,outfile.variables['elevation'][:].shape)  # broken: 'bath_grid' undefined, 'z' unused
    2869         outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
    2870         outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
    2871         #outfile.variables['z'][:] = elevation
    2872         outfile.variables['elevation'][:] = elevation  #FIXME HACK4
    2873 
    2874         # This updates the _range values
    2875         q = 'elevation'
    2876         outfile.variables[q + Write_sts.RANGE][0] = min(elevation)
    2877         outfile.variables[q + Write_sts.RANGE][1] = max(elevation)
    2878 
    2879     ##
    2880     # @brief Store quantity data in the underlying file.
    2881     # @param outfile
    2882     # @param sts_precision
    2883     # @param slice_index
    2884     # @param time
    2885     # @param verbose True if this function is to be verbose.
    2886     # @param **quant Extra keyword args.
    2887     def store_quantities(self, outfile, sts_precision=num.float32,
    2888                          slice_index=None, time=None,
    2889                          verbose=False, **quant):
    2890         """Write the quantity info.
    2891 
    2892         **quant is extra keyword arguments passed in. These must be
    2893           the sts quantities, currently: stage, xmomentum and ymomentum.
    2894 
    2895         If the time array has already been built, use the slice_index
    2896         to specify the index.
    2897 
    2898         Otherwise, use time to increase the time dimension
    2899 
    2900         Maybe make this general, but the viewer assumes these quantities,
    2901         so maybe we don't want it general - unless the viewer is general
    2902 
    2903         precondition:
    2904             triangulation and header have been called.
    2905         """
    2906 
    2907         if time is not None:
    2908             file_time = outfile.variables['time']
    2909             slice_index = len(file_time)
    2910             file_time[slice_index] = time
    2911 
    2912         # Write the conserved quantities from Domain.
    2913         # Typically stage, xmomentum, ymomentum
    2914         # other quantities will be ignored, silently.
    2915         # Also write the ranges: stage_range
    2916         for q in Write_sts.sts_quantities:
    2917             if not quant.has_key(q):
    2918                 msg = 'STS file can not write quantity %s' % q
    2919                 raise NewQuantity, msg
    2920             else:
    2921                 q_values = quant[q]
    2922                 outfile.variables[q][slice_index] = \
    2923                                 q_values.astype(sts_precision)
    2924 
    2925                 # This updates the _range values
    2926                 q_range = outfile.variables[q + Write_sts.RANGE][:]
    2927                 q_values_min = num.min(q_values)
    2928                 if q_values_min < q_range[0]:
    2929                     outfile.variables[q + Write_sts.RANGE][0] = q_values_min
    2930                 q_values_max = num.max(q_values)
    2931                 if q_values_max > q_range[1]:
    2932                     outfile.variables[q + Write_sts.RANGE][1] = q_values_max
    2933 
    2934 
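A minimal usage sketch of store_quantities may help here (hypothetical names: outfile is an open NetCDF handle and stage_series a stations-by-timesteps array; per the precondition, the triangulation and header must already have been written):

    # Hedged sketch only; assumes Write_sts() constructs with no arguments.
    writer = Write_sts()
    for step, t in enumerate([0.0, 0.5, 1.0]):
        # Passing time=t appends t to the time variable and derives
        # slice_index automatically; stage is the required keyword.
        writer.store_quantities(outfile, time=t,
                                stage=stage_series[:, step])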
    2935 ##
    2936 # @brief A class for reading URS mux files.
    2937 class Urs_points:
    2938     """
    2939     Read the info in URS mux files.
    2940 
    2941     For the quantities, here's the correlation between the file names
    2942     and what they mean:
    2943     z-mux is height above sea level, m
    2944     e-mux is velocity in the Easterly direction, m/s
    2945     n-mux is velocity in the Northerly direction, m/s
    2946     """
    2947 
    2948     ##
    2949     # @brief Initialize this instance of Urs_points.
    2950     # @param urs_file Path to the underlying data file.
    2951     def __init__(self, urs_file):
    2952         self.iterated = False
    2953         columns = 3                         # long, lat , depth
    2954         mux_file = open(urs_file, 'rb')
    2955 
    2956         # Number of points/stations
    2957         (self.points_num,) = unpack('i', mux_file.read(4))
    2958 
    2959         # nt, int - Number of time steps
    2960         (self.time_step_count,) = unpack('i', mux_file.read(4))
    2961         #dt, float - time step, seconds
    2962         (self.time_step,) = unpack('f', mux_file.read(4))
    2963         msg = "Bad data in the urs file."
    2964         if self.points_num < 0:
    2965             mux_file.close()
    2966             raise ANUGAError, msg
    2967         if self.time_step_count < 0:
    2968             mux_file.close()
    2969             raise ANUGAError, msg
    2970         if self.time_step < 0:
    2971             mux_file.close()
    2972             raise ANUGAError, msg
    2973 
    2974         # The depth is in meters; it is the distance from the ocean
    2975         # surface to the sea bottom.
    2976         lonlatdep = p_array.array('f')
    2977         lonlatdep.read(mux_file, columns * self.points_num)
    2978         lonlatdep = num.array(lonlatdep, dtype=num.float)
    2979         lonlatdep = num.reshape(lonlatdep, (self.points_num, columns))
    2980         self.lonlatdep = lonlatdep
    2981 
    2982         self.mux_file = mux_file
    2983         # check this array
    2984 
    2985     ##
    2986     # @brief Allow iteration over quantity data wrt time.
    2987     def __iter__(self):
    2988         """
    2989         Iterate over the quantity data, which is with respect to time.
    2990 
    2991         Note: You can only iterate once over an object.
    2992 
    2993         Returns quantity information for each time slice.
    2994         """
    2995 
    2996         msg = "You can only iterate once over a urs file."
    2997         assert not self.iterated, msg
    2998 
    2999         self.iter_time_step = 0
    3000         self.iterated = True
    3001 
    3002         return self
    3003 
    3004     ##
    3005     # @brief Return the quantity data for the next time slice.
    3006     def next(self):
    3007         if self.time_step_count == self.iter_time_step:
    3008             self.close()
    3009             raise StopIteration
    3010 
    3011         #Read in a time slice from mux file
    3012         hz_p_array = p_array.array('f')
    3013         hz_p_array.read(self.mux_file, self.points_num)
    3014         hz_p = num.array(hz_p_array, dtype=num.float)
    3015         self.iter_time_step += 1
    3016 
    3017         return hz_p
    3018 
    3019     ##
    3020     # @brief Close the mux file.
    3021     def close(self):
    3022         self.mux_file.close()
    3023 
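To make the binary layout concrete, here is a hedged round-trip sketch (hypothetical filename and values) that writes the header and data exactly as __init__ and next() above read them, then iterates:

    from struct import pack

    f = open('tiny.z-mux', 'wb')               # hypothetical file
    f.write(pack('i', 2))                       # number of points/stations
    f.write(pack('i', 3))                       # number of time steps
    f.write(pack('f', 0.5))                     # time step, seconds
    for lon, lat, depth in [(114.5, -21.5, 20.0), (115.0, -21.0, 22.0)]:
        f.write(pack('fff', lon, lat, depth))   # lon/lat/depth block
    for t in range(3):                          # one float per point per step
        f.write(pack('ff', 1.0 + t, 2.0 + t))
    f.close()

    for hz in Urs_points('tiny.z-mux'):         # one array per time slice
        print(hz)                               # [ 1.  2.], [ 2.  3.], [ 3.  4.]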
    3024 ################################################################################
    3025 # END URS UNGRIDDED 2 SWW
    3026 ################################################################################
     1683
    30271684
    30281685
  • trunk/anuga_core/source/anuga/shallow_water/test_data_manager.py

    r7745 r7759  
    1818from anuga.shallow_water.data_manager import *
    1919from anuga.shallow_water.sww_file import SWW_file
    20 from anuga.file_conversion.file_conversion import tsh2sww, \
    21                         asc_csiro2sww, pmesh_to_domain_instance
    2220from anuga.coordinate_transforms.geo_reference import Geo_reference                       
    2321from anuga.coordinate_transforms.redfearn import degminsec2decimal_degrees
     
    738736
    739737
    740     def test_hecras_cross_sections2pts(self):
    741         """Test conversion from HECRAS cross sections in ascii format
    742         to native NetCDF pts format
    743         """
    744 
    745         import time, os
    746         from Scientific.IO.NetCDF import NetCDFFile
    747 
    748         #Write test sdf file
    749         root = 'hecrastest'
    750 
    751         filename = root+'.sdf'
    752         fid = open(filename, 'w')
    753         fid.write("""
    754 # RAS export file created on Mon 15Aug2005 11:42
    755 # by HEC-RAS Version 3.1.1
    756 
    757 BEGIN HEADER:
    758   UNITS: METRIC
    759   DTM TYPE: TIN
    760   DTM: v:\1\cit\perth_topo\river_tin
    761   STREAM LAYER: c:\\x_local\hecras\21_02_03\up_canning_cent3d.shp
    762   CROSS-SECTION LAYER: c:\\x_local\hecras\21_02_03\up_can_xs3d.shp
    763   MAP PROJECTION: UTM
    764   PROJECTION ZONE: 50
    765   DATUM: AGD66
    766   VERTICAL DATUM:
    767   NUMBER OF REACHES:  19
    768   NUMBER OF CROSS-SECTIONS:  2
    769 END HEADER:
    770 
    771 
    772 BEGIN CROSS-SECTIONS:
    773 
    774   CROSS-SECTION:
    775     STREAM ID:Southern-Wungong
    776     REACH ID:Southern-Wungong
    777     STATION:21410
    778     CUT LINE:
    779       407546.08 , 6437277.542
    780       407329.32 , 6437489.482
    781       407283.11 , 6437541.232
    782     SURFACE LINE:
    783      407546.08,   6437277.54,   52.14
    784      407538.88,   6437284.58,   51.07
    785      407531.68,   6437291.62,   50.56
    786      407524.48,   6437298.66,   49.58
    787      407517.28,   6437305.70,   49.09
    788      407510.08,   6437312.74,   48.76
    789   END:
    790 
    791   CROSS-SECTION:
    792     STREAM ID:Swan River
    793     REACH ID:Swan Mouth
    794     STATION:840.*
    795     CUT LINE:
    796       381178.0855 , 6452559.0685
    797       380485.4755 , 6453169.272
    798     SURFACE LINE:
    799      381178.09,   6452559.07,   4.17
    800      381169.49,   6452566.64,   4.26
    801      381157.78,   6452576.96,   4.34
    802      381155.97,   6452578.56,   4.35
    803      381143.72,   6452589.35,   4.43
    804      381136.69,   6452595.54,   4.58
    805      381114.74,   6452614.88,   4.41
    806      381075.53,   6452649.43,   4.17
    807      381071.47,   6452653.00,   3.99
    808      381063.46,   6452660.06,   3.67
    809      381054.41,   6452668.03,   3.67
    810   END:
    811 END CROSS-SECTIONS:
    812 """)
    813 
    814         fid.close()
    815 
    816 
    817         #Convert to NetCDF pts
    818         hecras_cross_sections2pts(root)
    819 
    820         #Check contents
    821         #Get NetCDF
    822         fid = NetCDFFile(root+'.pts', netcdf_mode_r)
    823 
    824         # Get the variables
    825         #print fid.variables.keys()
    826         points = fid.variables['points']
    827         elevation = fid.variables['elevation']
    828 
    829         #Check values
    830         ref_points = [[407546.08, 6437277.54],
    831                       [407538.88, 6437284.58],
    832                       [407531.68, 6437291.62],
    833                       [407524.48, 6437298.66],
    834                       [407517.28, 6437305.70],
    835                       [407510.08, 6437312.74]]
    836 
    837         ref_points += [[381178.09, 6452559.07],
    838                        [381169.49, 6452566.64],
    839                        [381157.78, 6452576.96],
    840                        [381155.97, 6452578.56],
    841                        [381143.72, 6452589.35],
    842                        [381136.69, 6452595.54],
    843                        [381114.74, 6452614.88],
    844                        [381075.53, 6452649.43],
    845                        [381071.47, 6452653.00],
    846                        [381063.46, 6452660.06],
    847                        [381054.41, 6452668.03]]
    848 
    849 
    850         ref_elevation = [52.14, 51.07, 50.56, 49.58, 49.09, 48.76]
    851         ref_elevation += [4.17, 4.26, 4.34, 4.35, 4.43, 4.58, 4.41, 4.17, 3.99, 3.67, 3.67]
    852 
    853         #print points[:]
    854         #print ref_points
    855         assert num.allclose(points, ref_points)
    856 
    857         #print attributes[:]
    858         #print ref_elevation
    859         assert num.allclose(elevation, ref_elevation)
    860 
    861         #Cleanup
    862         fid.close()
    863 
    864 
    865         os.remove(root + '.sdf')
    866         os.remove(root + '.pts')
    867 
    868738
    869739    def test_export_grid(self):
     
    13591229        os.remove(ascfile)
    13601230        os.remove(swwfile2)
    1361 
    1362 
    1363 
    1364     def test_sww2pts_centroids(self):
    1365         """Test that sww information can be converted correctly to pts data at specified coordinates
    1366         - in this case, the centroids.
    1367         """
    1368 
    1369         import time, os
    1370         from Scientific.IO.NetCDF import NetCDFFile
    1371         # Used for points that lie outside mesh
    1372         NODATA_value = 1758323
    1373 
    1374         # Setup
    1375         self.domain.set_name('datatest')
    1376 
    1377         ptsfile = self.domain.get_name() + '_elevation.pts'
    1378         swwfile = self.domain.get_name() + '.sww'
    1379 
    1380         self.domain.set_datadir('.')
    1381         self.domain.format = 'sww'
    1382         self.smooth = True #self.set_store_vertices_uniquely(False)
    1383         self.domain.set_quantity('elevation', lambda x,y: -x-y)
    1384 
    1385         self.domain.geo_reference = Geo_reference(56,308500,6189000)
    1386 
    1387         sww = SWW_file(self.domain)
    1388         sww.store_connectivity()
    1389         sww.store_timestep()
    1390 
    1391         #self.domain.tight_slope_limiters = 1
    1392         self.domain.evolve_to_end(finaltime = 0.01)
    1393         sww.store_timestep()
    1394 
    1395         # Check contents in NetCDF
    1396         fid = NetCDFFile(sww.filename, netcdf_mode_r)
    1397 
    1398         # Get the variables
    1399         x = fid.variables['x'][:]
    1400         y = fid.variables['y'][:]
    1401         elevation = fid.variables['elevation'][:]
    1402         time = fid.variables['time'][:]
    1403         stage = fid.variables['stage'][:]
    1404 
    1405         volumes = fid.variables['volumes'][:]
    1406 
    1407 
    1408         # Invoke interpolation for vertex points       
    1409         points = num.concatenate( (x[:,num.newaxis],y[:,num.newaxis]), axis=1 )
    1410         points = num.ascontiguousarray(points)
    1411         sww2pts(self.domain.get_name(),
    1412                 quantity = 'elevation',
    1413                 data_points = points,
    1414                 NODATA_value = NODATA_value,
    1415                 verbose = self.verbose)
    1416         ref_point_values = elevation
    1417         point_values = Geospatial_data(ptsfile).get_attributes()
    1418         #print 'P', point_values
    1419         #print 'Ref', ref_point_values       
    1420         assert num.allclose(point_values, ref_point_values)       
    1421 
    1422 
    1423 
    1424         # Invoke interpolation for centroids
    1425         points = self.domain.get_centroid_coordinates()
    1426         #print points
    1427         sww2pts(self.domain.get_name(),
    1428                 quantity = 'elevation',
    1429                 data_points = points,
    1430                 NODATA_value = NODATA_value,
    1431                 verbose = self.verbose)
    1432         ref_point_values = [-0.5, -0.5, -1, -1, -1, -1, -1.5, -1.5]   #At centroids
    1433 
    1434        
    1435         point_values = Geospatial_data(ptsfile).get_attributes()
    1436         #print 'P', point_values
    1437         #print 'Ref', ref_point_values       
    1438         assert num.allclose(point_values, ref_point_values)       
    1439 
    1440 
    1441 
    1442         fid.close()
    1443 
    1444         #Cleanup
    1445         os.remove(sww.filename)
    1446         os.remove(ptsfile)
    14471231
    14481232
     
    20271811
    20281812        os.remove(root + '.dem')
    2029         os.remove(root + '_100.dem')
    2030 
    2031 
    2032     def write_mux(self, lat_long_points, time_step_count, time_step,
    2033                   depth=None, ha=None, ua=None, va=None):
    2034         """
    2035         This will write 3 non-gridded mux files, for testing.
    2036         If no quantities are passed in,
    2037         ha and va quantities will be the Easting values.
    2038         Depth and ua will be the Northing value.
    2039        
    2040         The mux file format has south as positive; note that, unlike
    2041         write_mux2, this function does not swap the sign for va.
    2042         """
    2043 
    2044         #print "lat_long_points", lat_long_points
    2045         #print "time_step_count",time_step_count
    2046         #print "time_step",
    2047 
    2048        
    2049         points_num = len(lat_long_points)
    2050         lonlatdeps = []
    2051         quantities = ['HA','UA','VA']
    2052        
    2053         mux_names = [WAVEHEIGHT_MUX_LABEL,
    2054                      EAST_VELOCITY_LABEL,
    2055                      NORTH_VELOCITY_LABEL]
    2056         quantities_init = [[],[],[]]
    2057         # urs binary is latitude fastest
    2058         for point in lat_long_points:
    2059             lat = point[0]
    2060             lon = point[1]
    2061             _ , e, n = redfearn(lat, lon)
    2062             if depth is None:
    2063                 this_depth = n
    2064             else:
    2065                 this_depth = depth
    2066             if ha is None:
    2067                 this_ha = e
    2068             else:
    2069                 this_ha = ha
    2070             if ua is None:
    2071                 this_ua = n
    2072             else:
    2073                 this_ua = ua
    2074             if va is None:
    2075                 this_va = e   
    2076             else:
    2077                 this_va = va         
    2078             lonlatdeps.append([lon, lat, this_depth])
    2079             quantities_init[0].append(this_ha) # HA
    2080             quantities_init[1].append(this_ua) # UA
    2081             quantities_init[2].append(this_va) # VA
    2082                
    2083         file_handle, base_name = tempfile.mkstemp("")
    2084         os.close(file_handle)
    2085         os.remove(base_name)
    2086 
    2087         files = []       
    2088         for i, q in enumerate(quantities):
    2089             quantities_init[i] = ensure_numeric(quantities_init[i])
    2090             #print "HA_init", HA_init
    2091             q_time = num.zeros((time_step_count, points_num), num.float)
    2092             for time in range(time_step_count):
    2093                 q_time[time,:] = quantities_init[i] #* time * 4
    2094            
    2095             #Write C files
    2096             columns = 3 # long, lat , depth
    2097             file = base_name + mux_names[i]
    2098             #print "base_name file",file
    2099             f = open(file, 'wb')
    2100             files.append(file)
    2101             f.write(pack('i',points_num))
    2102             f.write(pack('i',time_step_count))
    2103             f.write(pack('f',time_step))
    2104 
    2105             #write lat/long info
    2106             for lonlatdep in lonlatdeps:
    2107                 for value in lonlatdep:   # avoid shadowing builtin float
    2108                     f.write(pack('f',value))
    2109                    
    2110             # Write quantity info
    2111             for time in  range(time_step_count):
    2112                 for point_i in range(points_num):
    2113                     f.write(pack('f',q_time[time,point_i]))
    2114                     #print " mux_names[i]", mux_names[i]
    2115                     #print "f.write(pack('f',q_time[time,i]))", q_time[time,point_i]
    2116             f.close()
    2117         return base_name, files
    2118        
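Read back-to-back, the pack calls above define the complete old-format mux layout that Urs_points consumes; summarised as struct codes:

    # points_num, time_step_count          -> 'i', 'i'
    # time_step (seconds)                  -> 'f'
    # lon, lat, depth per point            -> 'fff' * points_num
    # quantity values per time step        -> 'f' * points_num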
    2119    
    2120     def delete_mux(self, files):
    2121         for file in files:
    2122             os.remove(file)
    2123        
    2124     def write_mux2(self, lat_long_points, time_step_count, time_step,
    2125                    first_tstep, last_tstep,
    2126                    depth=None, ha=None, ua=None, va=None):
    2127         """
    2128         This will write 3 non-gridded mux files, for testing.
    2129         If no quantities are passed in,
    2130         ha and va quantities will be the Easting values.
    2131         Depth and ua will be the Northing value.
    2132         """
    2133         #print "lat_long_points", lat_long_points
    2134         #print "time_step_count",time_step_count
    2135         #print "time_step",
    2136 
    2137         #irrelevant header information
    2138         ig=ilon=ilat=0
    2139         mcolat=mcolon=centerlat=centerlon=offset=az=baz=id=0.0
    2140 
    2141         points_num = len(lat_long_points)
    2142         latlondeps = []
    2143         quantities = ['HA','UA','VA']
    2144 
    2145         mux_names = [WAVEHEIGHT_MUX2_LABEL,
    2146                      EAST_VELOCITY_MUX2_LABEL,
    2147                      NORTH_VELOCITY_MUX2_LABEL]
    2148 
    2149         msg='first_tstep and last_tstep arrays must have same length as number of points'
    2150         assert len(first_tstep)==points_num,msg
    2151         assert len(last_tstep)==points_num,msg
    2152 
    2153         if depth is not None:
    2154             depth=ensure_numeric(depth)
    2155             assert len(depth)==points_num
    2156         if ha is not None:
    2157             ha=ensure_numeric(ha)
    2158             assert ha.shape==(points_num,time_step_count)
    2159         if ua is not None:
    2160             ua=ensure_numeric(ua)
    2161             assert ua.shape==(points_num,time_step_count)
    2162         if va is not None:
    2163             va=ensure_numeric(va)
    2164             assert va.shape==(points_num,time_step_count)
    2165 
    2166         quantities_init = [[],[],[]]
    2167         # urs binary is latitude fastest
    2168         for i,point in enumerate(lat_long_points):
    2169             lat = point[0]
    2170             lon = point[1]
    2171             _ , e, n = redfearn(lat, lon)
    2172             if depth is None:
    2173                 this_depth = n
    2174             else:
    2175                 this_depth = depth[i]
    2176             latlondeps.append([lat, lon, this_depth])
    2177 
    2178             if ha is None:
    2179                 this_ha = e
    2180                 quantities_init[0].append(num.ones(time_step_count,num.float)*this_ha) # HA
    2181             else:
    2182                 quantities_init[0].append(ha[i])
    2183             if ua is None:
    2184                 this_ua = n
    2185                 quantities_init[1].append(num.ones(time_step_count,num.float)*this_ua) # UA
    2186             else:
    2187                 quantities_init[1].append(ua[i])
    2188             if va is None:
    2189                 this_va = e
    2190                 quantities_init[2].append(num.ones(time_step_count,num.float)*this_va) # VA
    2191             else:
    2192                 quantities_init[2].append(-va[i]) # South is positive in MUX, so negate
    2193 
    2194         file_handle, base_name = tempfile.mkstemp("write_mux2")
    2195         os.close(file_handle)
    2196         os.remove(base_name)
    2197 
    2198         files = []       
    2199         for i, q in enumerate(quantities):
    2200             q_time = num.zeros((time_step_count, points_num), num.float)
    2201             quantities_init[i] = ensure_numeric(quantities_init[i])
    2202             for time in range(time_step_count):
    2203                 #print i, q, time, quantities_init[i][:,time]
    2204                 q_time[time,:] = quantities_init[i][:,time]
    2205                 #print i, q, time, q_time[time, :]               
    2206 
    2207             #Write C files
    2208             columns = 3 # long, lat , depth
    2209             file = base_name + mux_names[i]
    2210            
    2211             #print 'base_name file', file
    2212             f = open(file, 'wb')
    2213             files.append(file)
    2214 
    2215             f.write(pack('i',points_num))
    2216             #write mux 2 header
    2217             for latlondep in latlondeps:
    2218                 f.write(pack('f',latlondep[0]))
    2219                 f.write(pack('f',latlondep[1]))
    2220                 f.write(pack('f',mcolat))
    2221                 f.write(pack('f',mcolon))
    2222                 f.write(pack('i',ig))
    2223                 f.write(pack('i',ilon))
    2224                 f.write(pack('i',ilat))
    2225                 f.write(pack('f',latlondep[2]))
    2226                 f.write(pack('f',centerlat))
    2227                 f.write(pack('f',centerlon))
    2228                 f.write(pack('f',offset))
    2229                 f.write(pack('f',az))
    2230                 f.write(pack('f',baz))
    2231                 f.write(pack('f',time_step))
    2232                 f.write(pack('i',time_step_count))
    2233                 for j in range(4): # identifier
    2234                     f.write(pack('f',id))   
    2235 
    2236             #first_tstep=1
    2237             #last_tstep=time_step_count
    2238             for i,latlondep in enumerate(latlondeps):
    2239                 f.write(pack('i',first_tstep[i]))
    2240             for i,latlondep in enumerate(latlondeps):
    2241                 f.write(pack('i',last_tstep[i]))
    2242 
    2243             # Find when first station starts recording
    2244             min_tstep = min(first_tstep)
    2245             # Find when all stations have stopped recording
    2246             max_tstep = max(last_tstep)
    2247 
    2248             #for time in  range(time_step_count):
    2249             for time in range(min_tstep-1,max_tstep):
    2250                 f.write(pack('f',time*time_step))               
    2251                 for point_i in range(points_num):
    2252                     if time+1>=first_tstep[point_i] and time+1<=last_tstep[point_i]:
    2253                         #print 'writing', time, point_i, q_time[time, point_i]
    2254                         f.write(pack('f', q_time[time, point_i]))
    2255             f.close()
    2256 
    2257         return base_name, files
    2258 
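For reference, after the leading points_num ('i'), the per-station mux2 header written above packs to the following sequence of struct codes (derived directly from the pack calls; note the four identifier words are written as floats here but read back as ints in test_read_mux_platform_problem2 below):

    # lat, lon, mcolat, mcolon             -> 'ffff'
    # ig, ilon, ilat                       -> 'iii'
    # depth, centerlat, centerlon          -> 'fff'
    # offset, az, baz, time_step           -> 'ffff'
    # time_step_count                      -> 'i'
    # identifier (4 words)                 -> 'ffff'

The headers are followed by the first_tstep and last_tstep arrays (one 'i' per station each), then for each global time step a time stamp ('f') plus one 'f' per station that is recording at that step.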
    2259     def test_urs2sts_read_mux2_pyI(self):
    2260         """test_urs2sts_read_mux2_pyI(self):
    2261         Constant stage, momentum at each gauge
    2262         """
    2263         tide = 1
    2264         time_step_count = 3
    2265         time_step = 2
    2266         lat_long_points =[(-21.5,114.5),(-21,114.5),(-21.5,115), (-21.,115.)]
    2267         n=len(lat_long_points)
    2268         first_tstep=num.ones(n,num.int)
    2269         last_tstep=time_step_count*num.ones(n,num.int)
    2270         depth=20*num.ones(n,num.float)
    2271         ha=2*num.ones((n,time_step_count),num.float)
    2272         ua=5*num.ones((n,time_step_count),num.float)
    2273         va=-10*num.ones((n,time_step_count),num.float)
    2274         #-ve added to take into account mux file format where south is positive.
    2275         base_name, files = self.write_mux2(lat_long_points,
    2276                                       time_step_count, time_step,
    2277                                       first_tstep, last_tstep,
    2278                                       depth=depth,
    2279                                       ha=ha,
    2280                                       ua=ua,
    2281                                       va=va)
    2282 
    2283         weights=num.ones(1, num.float)
    2284         #ensure that files are indeed mux2 files
    2285         times, latitudes, longitudes, elevation, stage, starttime = read_mux2_py([files[0]],weights)
    2286         ua_times, ua_latitudes, ua_longitudes, ua_elevation, xvelocity,starttime_ua=read_mux2_py([files[1]],weights)
    2287         msg='ha and ua have different gauge meta data'
    2288         assert num.allclose(times,ua_times) and num.allclose(latitudes,ua_latitudes) and num.allclose(longitudes,ua_longitudes) and num.allclose(elevation,ua_elevation) and num.allclose(starttime,starttime_ua), msg
    2289         va_times, va_latitudes, va_longitudes, va_elevation, yvelocity, starttime_va=read_mux2_py([files[2]],weights)
    2290         msg='ha and va have different gauge meta data'
    2291         assert num.allclose(times,va_times) and num.allclose(latitudes,va_latitudes) and num.allclose(longitudes,va_longitudes) and num.allclose(elevation,va_elevation) and num.allclose(starttime,starttime_va), msg
    2292 
    2293         self.delete_mux(files)
    2294 
    2295         msg='time array has incorrect length'
    2296         assert times.shape[0]==time_step_count,msg
    2297        
    2298         msg = 'time array is incorrect'
    2299         #assert allclose(times,time_step*num.arange(1,time_step_count+1)),msg
    2300         assert num.allclose(times,time_step*num.arange(time_step_count)), msg
    2301        
    2302         msg='Incorrect gauge positions returned'
    2303         for i,point in enumerate(lat_long_points):
    2304             assert num.allclose(latitudes[i],point[0]) and num.allclose(longitudes[i],point[1]),msg
    2305 
    2306         msg='Incorrect gauge depths returned'
    2307         assert num.allclose(elevation,-depth),msg
    2308         msg='incorrect gauge height time series returned'
    2309         assert num.allclose(stage,ha), msg
    2310         msg='incorrect gauge ua time series returned'
    2311         assert num.allclose(xvelocity,ua), msg
    2312         msg='incorrect gauge va time series returned'
    2313         assert num.allclose(yvelocity, -va), msg
    2314 
    2315     def test_urs2sts_read_mux2_pyII(self):
    2316         """Spatially varying stage
    2317         """
    2318         tide = 1
    2319         time_step_count = 3
    2320         time_step = 2
    2321         lat_long_points =[(-21.5,114.5),(-21,114.5),(-21.5,115), (-21.,115.)]
    2322         n=len(lat_long_points)
    2323         first_tstep=num.ones(n,num.int)
    2324         last_tstep=(time_step_count)*num.ones(n,num.int)
    2325         depth=20*num.ones(n,num.float)
    2326         ha=2*num.ones((n,time_step_count),num.float)
    2327         ha[0]=num.arange(0,time_step_count)+1
    2328         ha[1]=time_step_count-num.arange(1,time_step_count+1)
    2329         ha[1]=num.arange(time_step_count,2*time_step_count)
    2330         ha[2]=num.arange(2*time_step_count,3*time_step_count)
    2331         ha[3]=num.arange(3*time_step_count,4*time_step_count)
    2332         ua=5*num.ones((n,time_step_count),num.float)
    2333         va=-10*num.ones((n,time_step_count),num.float)
    2334         #-ve added to take into account mux file format where south is positive.
    2335         base_name, files = self.write_mux2(lat_long_points,
    2336                                       time_step_count, time_step,
    2337                                       first_tstep, last_tstep,
    2338                                       depth=depth,
    2339                                       ha=ha,
    2340                                       ua=ua,
    2341                                       va=va)
    2342 
    2343         weights=num.ones(1, num.float)
    2344         #ensure that files are indeed mux2 files
    2345         times, latitudes, longitudes, elevation, stage,starttime=read_mux2_py([files[0]],weights)
    2346         ua_times, ua_latitudes, ua_longitudes, ua_elevation, xvelocity,starttime_ua=read_mux2_py([files[1]],weights)
    2347         msg='ha and ua have different gauge meta data'
    2348         assert num.allclose(times,ua_times) and num.allclose(latitudes,ua_latitudes) and num.allclose(longitudes,ua_longitudes) and num.allclose(elevation,ua_elevation) and num.allclose(starttime,starttime_ua), msg
    2349         va_times, va_latitudes, va_longitudes, va_elevation, yvelocity,starttime_va=read_mux2_py([files[2]],weights)
    2350         msg='ha and va have different gauge meta data'
    2351         assert num.allclose(times,va_times) and num.allclose(latitudes,va_latitudes) and num.allclose(longitudes,va_longitudes) and num.allclose(elevation,va_elevation) and num.allclose(starttime,starttime_va), msg
    2352 
    2353 
    2354         self.delete_mux(files)
    2355 
    2356         msg='time array has incorrect length'
    2357         #assert times.shape[0]==time_step_count,msg
    2358         msg = 'time array is incorrect'
    2359         #assert allclose(times,time_step*num.arange(1,time_step_count+1)),msg
    2360         msg='Incorrect gauge positions returned'
    2361         for i,point in enumerate(lat_long_points):
    2362             assert num.allclose(latitudes[i],point[0]) and num.allclose(longitudes[i],point[1]),msg
    2363 
    2364         msg='Incorrect gauge depths returned'
    2365         assert num.allclose(elevation, -depth),msg
    2366         msg='incorrect gauge height time series returned'
    2367         assert num.allclose(stage, ha), msg
    2368         msg='incorrect gauge ua time series returned'
    2369         assert num.allclose(xvelocity, ua), msg
    2370         msg='incorrect gauge va time series returned'
    2371         assert num.allclose(yvelocity, -va), msg # South is positive in MUX
    2372 
    2373     def test_urs2sts_read_mux2_pyIII(self):
    2374         """Varying start and finish times
    2375         """
    2376         tide = 1
    2377         time_step_count = 3
    2378         time_step = 2
    2379         lat_long_points =[(-21.5,114.5),(-21,114.5),(-21.5,115), (-21.,115.)]
    2380         n=len(lat_long_points)
    2381         first_tstep=num.ones(n,num.int)
    2382         first_tstep[0]+=1
    2383         first_tstep[2]+=1
    2384         last_tstep=(time_step_count)*num.ones(n,num.int)
    2385         last_tstep[0]-=1
    2386 
    2387         depth=20*num.ones(n,num.float)
    2388         ha=2*num.ones((n,time_step_count),num.float)
    2389         ha[0]=num.arange(0,time_step_count)
    2390         ha[1]=num.arange(time_step_count,2*time_step_count)
    2391         ha[2]=num.arange(2*time_step_count,3*time_step_count)
    2392         ha[3]=num.arange(3*time_step_count,4*time_step_count)
    2393         ua=5*num.ones((n,time_step_count),num.float)
    2394         va=-10*num.ones((n,time_step_count),num.float)
    2395         #-ve added to take into account mux file format where south is positive.
    2396         base_name, files = self.write_mux2(lat_long_points,
    2397                                       time_step_count, time_step,
    2398                                       first_tstep, last_tstep,
    2399                                       depth=depth,
    2400                                       ha=ha,
    2401                                       ua=ua,
    2402                                       va=va)
    2403 
    2404         weights=num.ones(1, num.float)
    2405         #ensure that files are indeed mux2 files
    2406         times, latitudes, longitudes, elevation, stage, starttime=read_mux2_py([files[0]],weights)
    2407         ua_times, ua_latitudes, ua_longitudes, ua_elevation, xvelocity, starttime_ua=read_mux2_py([files[1]],weights)
    2408         msg='ha and ua have different gauge meta data'
    2409         assert num.allclose(times,ua_times) and num.allclose(latitudes,ua_latitudes) and num.allclose(longitudes,ua_longitudes) and num.allclose(elevation,ua_elevation) and num.allclose(starttime,starttime_ua), msg
    2410         va_times, va_latitudes, va_longitudes, va_elevation, yvelocity,starttime_va=read_mux2_py([files[2]],weights)
    2411         msg='ha and va have different gauge meta data'
    2412         assert num.allclose(times,va_times) and num.allclose(latitudes,va_latitudes) and num.allclose(longitudes,va_longitudes) and num.allclose(elevation,va_elevation) and num.allclose(starttime,starttime_va), msg
    2413 
    2414         self.delete_mux(files)
    2415 
    2416         msg='time array has incorrect length'
    2417         #assert times.shape[0]==time_step_count,msg
    2418         msg = 'time array is incorrect'
    2419         #assert allclose(times,time_step*num.arange(1,time_step_count+1)),msg
    2420         msg='Incorrect gauge positions returned'
    2421         for i,point in enumerate(lat_long_points):
    2422             assert num.allclose(latitudes[i],point[0]) and num.allclose(longitudes[i],point[1]),msg
    2423 
    2424 
    2425         # Set original data used to write mux file to zero when gauges
    2426         # are not recording
    2427         ha[0][0]=0.0
    2428         ha[0][time_step_count-1]=0.0;
    2429         ha[2][0]=0.0;
    2430         ua[0][0]=0.0
    2431         ua[0][time_step_count-1]=0.0;
    2432         ua[2][0]=0.0;
    2433         va[0][0]=0.0
    2434         va[0][time_step_count-1]=0.0;
    2435         va[2][0]=0.0;
    2436         msg='Incorrect gauge depths returned'
    2437         assert num.allclose(elevation,-depth),msg
    2438         msg='incorrect gauge height time series returned'
    2439         assert num.allclose(stage,ha), msg
    2440         msg='incorrect gauge ua time series returned'
    2441         assert num.allclose(xvelocity,ua), msg
    2442         msg='incorrect gauge va time series returned'
    2443         assert num.allclose(yvelocity, -va), msg # South is positive in MUX
    2444        
    2445 
    2446        
    2447     def test_read_mux_platform_problem1(self):
    2448         """test_read_mux_platform_problem1
    2449        
    2450         This is to test a situation where read_mux returned
    2451         wrong values on Win32.
    2452 
    2453         This test passes on Windows but test_read_mux_platform_problem2
    2454         does not
    2455         """
    2456        
    2457         from urs_ext import read_mux2
    2458        
    2459         verbose = False
    2460                
    2461         tide = 1.5
    2462         time_step_count = 10
    2463         time_step = 0.2
    2464         times_ref = num.arange(0, time_step_count*time_step, time_step)
    2465 
    2466         lat_long_points = [(-21.5,114.5), (-21,114.5), (-21.5,115), (-21.,115.), (-22., 117.)]
    2467         n = len(lat_long_points)
    2468        
    2469         # Create different timeseries starting and ending at different times
    2470         first_tstep=num.ones(n, num.int)
    2471         first_tstep[0]+=2   # Point 0 starts at 2
    2472         first_tstep[1]+=4   # Point 1 starts at 4       
    2473         first_tstep[2]+=3   # Point 2 starts at 3
    2474        
    2475         last_tstep=(time_step_count)*num.ones(n,num.int)
    2476         last_tstep[0]-=1    # Point 0 ends 1 step early
    2477         last_tstep[1]-=2    # Point 1 ends 2 steps early               
    2478         last_tstep[4]-=3    # Point 4 ends 3 steps early       
    2479        
    2480         # Create varying elevation data (positive values for seafloor)
    2481         gauge_depth=20*num.ones(n,num.float)
    2482         for i in range(n):
    2483             gauge_depth[i] += i**2
    2484            
    2485         # Create data to be written to first mux file       
    2486         ha0=2*num.ones((n,time_step_count),num.float)
    2487         ha0[0]=num.arange(0,time_step_count)
    2488         ha0[1]=num.arange(time_step_count,2*time_step_count)
    2489         ha0[2]=num.arange(2*time_step_count,3*time_step_count)
    2490         ha0[3]=num.arange(3*time_step_count,4*time_step_count)
    2491         ua0=5*num.ones((n,time_step_count),num.float)
    2492         va0=-10*num.ones((n,time_step_count),num.float)
    2493 
    2494         # Ensure data used to write the mux file is zero when gauges
    2495         # are not recording
    2496         for i in range(n):
    2497              # For each point
    2498              for j in range(0, first_tstep[i]-1) + range(last_tstep[i], time_step_count):
    2499                  # For timesteps before and after recording range
    2500                  ha0[i][j] = ua0[i][j] = va0[i][j] = 0.0                                 
    2501        
    2502         # Write first mux file to be combined by urs2sts
    2503         base_nameI, filesI = self.write_mux2(lat_long_points,
    2504                                              time_step_count, time_step,
    2505                                              first_tstep, last_tstep,
    2506                                              depth=gauge_depth,
    2507                                              ha=ha0,
    2508                                              ua=ua0,
    2509                                              va=va0)
    2510 
    2511         # Create ordering file
    2512         permutation = ensure_numeric([4,0,2])
    2513 
    2514         _, ordering_filename = tempfile.mkstemp('')
    2515         order_fid = open(ordering_filename, 'w') 
    2516         order_fid.write('index, longitude, latitude\n')
    2517         for index in permutation:
    2518             order_fid.write('%d, %f, %f\n' %(index,
    2519                                              lat_long_points[index][1],
    2520                                              lat_long_points[index][0]))
    2521         order_fid.close()
    2522        
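With permutation [4, 0, 2] and the gauge list above, the ordering file written here contains exactly:

    index, longitude, latitude
    4, 117.000000, -22.000000
    0, 114.500000, -21.500000
    2, 115.000000, -21.500000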
    2523        
    2524 
    2525         # -------------------------------------
    2526         # Now read files back and check values
    2527         weights = ensure_numeric([1.0])
    2528 
    2529         # For each quantity read the associated list of source mux2 files
    2530         # with the extension associated with that quantity
    2531         file_params=-1*num.ones(3,num.float) #[nsta,dt,nt]
    2532         OFFSET = 5
    2533 
    2534         for j, file in enumerate(filesI):
    2535             data = read_mux2(1, [file], weights, file_params, permutation, verbose)
    2536 
    2537             number_of_selected_stations = data.shape[0]
    2538 
    2539             # Index where data ends and parameters begin
    2540             parameters_index = data.shape[1]-OFFSET         
    2541          
    2542             for i in range(number_of_selected_stations):
    2543                 if j == 0: assert num.allclose(data[i][:parameters_index], ha0[permutation[i], :])
    2544                 if j == 1: assert num.allclose(data[i][:parameters_index], ua0[permutation[i], :])
    2545                 if j == 2: assert num.allclose(data[i][:parameters_index], -va0[permutation[i], :])
    2546        
    2547         self.delete_mux(filesI)
    2548 
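As the checking loop above relies on, read_mux2 returns one row per selected station, in permutation order; each row holds the time series first and OFFSET (= 5) per-station parameters at the end. A hedged slicing sketch:

    # Sketch: split one read_mux2 row into its series and parameters.
    parameters_index = data.shape[1] - OFFSET
    series = data[0, :parameters_index]    # time series, first selected station
    params = data[0, parameters_index:]    # trailing per-station parameters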
    2549 
    2550        
    2551        
    2552     def test_read_mux_platform_problem2(self):
    2553         """test_read_mux_platform_problem2
    2554        
    2555         This is to test a situation where read_mux returned
    2556         wrong values on Win32.
    2557 
    2558         This test does not pass on Windows but test_read_mux_platform_problem1
    2559         does
    2560         """
    2561        
    2562         from urs_ext import read_mux2
    2563        
    2564         from anuga.config import single_precision as epsilon       
    2565        
    2566         verbose = False
    2567                
    2568         tide = 1.5
    2569         time_step_count = 10
    2570         time_step = 0.2
    2571        
    2572         times_ref = num.arange(0, time_step_count*time_step, time_step)
    2573        
    2574         lat_long_points = [(-21.5,114.5), (-21,114.5), (-21.5,115),
    2575                            (-21.,115.), (-22., 117.)]
    2576         n = len(lat_long_points)
    2577        
    2578         # Create different timeseries starting and ending at different times
    2579         first_tstep=num.ones(n,num.int)
    2580         first_tstep[0]+=2   # Point 0 starts at 2
    2581         first_tstep[1]+=4   # Point 1 starts at 4       
    2582         first_tstep[2]+=3   # Point 2 starts at 3
    2583        
    2584         last_tstep=(time_step_count)*num.ones(n,num.int)
    2585         last_tstep[0]-=1    # Point 0 ends 1 step early
    2586         last_tstep[1]-=2    # Point 1 ends 2 steps early               
    2587         last_tstep[4]-=3    # Point 4 ends 3 steps early       
    2588        
    2589         # Create varying elevation data (positive values for seafloor)
    2590         gauge_depth=20*num.ones(n,num.float)
    2591         for i in range(n):
    2592             gauge_depth[i] += i**2
    2593            
    2594         # Create data to be written to second mux file       
    2595         ha1=num.ones((n,time_step_count),num.float)
    2596         ha1[0]=num.sin(times_ref)
    2597         ha1[1]=2*num.sin(times_ref - 3)
    2598         ha1[2]=5*num.sin(4*times_ref)
    2599         ha1[3]=num.sin(times_ref)
    2600         ha1[4]=num.sin(2*times_ref-0.7)
    2601                
    2602         ua1=num.zeros((n,time_step_count),num.float)
    2603         ua1[0]=3*num.cos(times_ref)       
    2604         ua1[1]=2*num.sin(times_ref-0.7)   
    2605         ua1[2]=num.arange(3*time_step_count,4*time_step_count)
    2606         ua1[4]=2*num.ones(time_step_count)
    2607        
    2608         va1=num.zeros((n,time_step_count),num.float)
    2609         va1[0]=2*num.cos(times_ref-0.87)       
    2610         va1[1]=3*num.ones(time_step_count)
    2611         va1[3]=2*num.sin(times_ref-0.71)       
    2612        
    2613         # Ensure data used to write the mux file is zero when gauges
    2614         # are not recording
    2615         for i in range(n):
    2616              # For each point
    2617              for j in range(0, first_tstep[i]-1) + range(last_tstep[i], time_step_count):
    2618                  # For timesteps before and after recording range
    2619                  ha1[i][j] = ua1[i][j] = va1[i][j] = 0.0
    2620 
    2621 
    2622         #print 'Second station to be written to MUX'
    2623         #print 'ha', ha1[0,:]
    2624         #print 'ua', ua1[0,:]
    2625         #print 'va', va1[0,:]
    2626        
    2627         # Write second mux file to be combined by urs2sts
    2628         base_nameII, filesII = self.write_mux2(lat_long_points,
    2629                                                time_step_count, time_step,
    2630                                                first_tstep, last_tstep,
    2631                                                depth=gauge_depth,
    2632                                                ha=ha1,
    2633                                                ua=ua1,
    2634                                                va=va1)
    2635 
    2636 
    2637 
    2638 
    2639         # Read mux file back and verify its correctness
    2640 
    2641         ####################################################
    2642         # FIXME (Ole): This is where the test should
    2643         # verify that the MUX files are correct.
    2644 
    2645         #JJ: It appears as though certain quantities are not being
    2646         #stored with enough precision in the muxfile, or more likely
    2647         #that they are being cast to a lower precision when read in
    2648         #using read_mux2. Time step and q_time are equal, but only
    2649         #to approx 1e-7
    2650         ####################################################
    2651 
    2652         # Define information as it should be stored in mux2 files
    2653         points_num=len(lat_long_points)
    2654         depth=gauge_depth
    2655         ha=ha1
    2656         ua=ua1
    2657         va=va1
    2658        
    2659         quantities = ['HA','UA','VA']
    2660         mux_names = [WAVEHEIGHT_MUX2_LABEL,
    2661                      EAST_VELOCITY_MUX2_LABEL,
    2662                      NORTH_VELOCITY_MUX2_LABEL]
    2663         quantities_init = [[],[],[]]
    2664         latlondeps = []
    2665         #irrelevant header information
    2666         ig=ilon=ilat=0
    2667         mcolat=mcolon=centerlat=centerlon=offset=az=baz=id=0.0
    2668         # urs binary is latitude fastest
    2669         for i,point in enumerate(lat_long_points):
    2670             lat = point[0]
    2671             lon = point[1]
    2672             _ , e, n = redfearn(lat, lon)
    2673             if depth is None:
    2674                 this_depth = n
    2675             else:
    2676                 this_depth = depth[i]
    2677             latlondeps.append([lat, lon, this_depth])
    2678 
    2679             if ha is None:
    2680                 this_ha = e
    2681                 quantities_init[0].append(num.ones(time_step_count,num.float)*this_ha) # HA
    2682             else:
    2683                 quantities_init[0].append(ha[i])
    2684             if ua is None:
    2685                 this_ua = n
    2686                 quantities_init[1].append(num.ones(time_step_count,num.float)*this_ua) # UA
    2687             else:
    2688                 quantities_init[1].append(ua[i])
    2689             if va is None:
    2690                 this_va = e
    2691                 quantities_init[2].append(num.ones(time_step_count,num.float)*this_va) # VA
    2692             else:
    2693                 quantities_init[2].append(va[i])
    2694 
    2695         for i, q in enumerate(quantities):
    2696             #print
    2697             #print i, q
    2698            
    2699             q_time = num.zeros((time_step_count, points_num), num.float)
    2700             quantities_init[i] = ensure_numeric(quantities_init[i])
    2701             for time in range(time_step_count):
    2702                 #print i, q, time, quantities_init[i][:,time]
    2703                 q_time[time,:] = quantities_init[i][:,time]
    2704                 #print i, q, time, q_time[time, :]
    2705 
    2706            
    2707             filename = base_nameII + mux_names[i]
    2708             f = open(filename, 'rb')
    2709             if self.verbose: print 'Reading ' + filename
    2710             assert abs(points_num-unpack('i',f.read(4))[0])<epsilon
    2711             #write mux 2 header
    2712             for latlondep in latlondeps:
    2713                 assert abs(latlondep[0]-unpack('f',f.read(4))[0])<epsilon
    2714                 assert abs(latlondep[1]-unpack('f',f.read(4))[0])<epsilon
    2715                 assert abs(mcolat-unpack('f',f.read(4))[0])<epsilon
    2716                 assert abs(mcolon-unpack('f',f.read(4))[0])<epsilon
    2717                 assert abs(ig-unpack('i',f.read(4))[0])<epsilon
    2718                 assert abs(ilon-unpack('i',f.read(4))[0])<epsilon
    2719                 assert abs(ilat-unpack('i',f.read(4))[0])<epsilon
    2720                 assert abs(latlondep[2]-unpack('f',f.read(4))[0])<epsilon
    2721                 assert abs(centerlat-unpack('f',f.read(4))[0])<epsilon
    2722                 assert abs(centerlon-unpack('f',f.read(4))[0])<epsilon
    2723                 assert abs(offset-unpack('f',f.read(4))[0])<epsilon
    2724                 assert abs(az-unpack('f',f.read(4))[0])<epsilon
    2725                 assert abs(baz-unpack('f',f.read(4))[0])<epsilon
    2726                
    2727                 x = unpack('f', f.read(4))[0]
    2728                 #print time_step
    2729                 #print x
    2730                 assert abs(time_step-x)<epsilon
    2731                 assert abs(time_step_count-unpack('i',f.read(4))[0])<epsilon
    2732                 for j in range(4): # identifier
    2733                     assert abs(id-unpack('i',f.read(4))[0])<epsilon
    2734 
    2735             #first_tstep=1
    2736             #last_tstep=time_step_count
    2737             for i,latlondep in enumerate(latlondeps):
    2738                 assert abs(first_tstep[i]-unpack('i',f.read(4))[0])<epsilon
    2739             for i,latlondep in enumerate(latlondeps):
    2740                 assert abs(last_tstep[i]-unpack('i',f.read(4))[0])<epsilon
    2741 
    2742             # Find when first station starts recording
    2743             min_tstep = min(first_tstep)
    2744             # Find when all stations have stopped recording
    2745             max_tstep = max(last_tstep)
    2746 
    2747             #for time in  range(time_step_count):
    2748             for time in range(min_tstep-1,max_tstep):
    2749                 assert abs(time*time_step-unpack('f',f.read(4))[0])<epsilon
    2750                 for point_i in range(points_num):
    2751                     if time+1>=first_tstep[point_i] and time+1<=last_tstep[point_i]:
    2752                         x = unpack('f',f.read(4))[0]
    2753                         #print time, x, q_time[time, point_i]
    2754                         if q == 'VA': x = -x # South is positive in MUX
    2755                         assert abs(q_time[time, point_i]-x)<epsilon
    2756 
    2757             f.close()
    2758                                                
    2759         # Create ordering file
    2760         permutation = ensure_numeric([4,0,2])
    2761 
    2762        #  _, ordering_filename = tempfile.mkstemp('')
    2763 #         order_fid = open(ordering_filename, 'w') 
    2764 #         order_fid.write('index, longitude, latitude\n')
    2765 #         for index in permutation:
    2766 #             order_fid.write('%d, %f, %f\n' %(index,
    2767 #                                              lat_long_points[index][1],
    2768 #                                              lat_long_points[index][0]))
    2769 #         order_fid.close()
    2770        
    2771         # -------------------------------------
    2772         # Now read files back and check values
    2773         weights = ensure_numeric([1.0])
    2774 
    2775         # For each quantity read the associated list of source mux2 files
    2776         # with the extension associated with that quantity
    2777         file_params=-1*num.ones(3,num.float) # [nsta,dt,nt]
    2778         OFFSET = 5
    2779 
    2780         for j, file in enumerate(filesII):
    2781             # Read stage, u, v enumerated as j
    2782             #print 'Reading', j, file
    2783             data = read_mux2(1, [file], weights, file_params, permutation, verbose)
    2784 
    2785             #print 'Data received by Python'
    2786             #print data[1][8]
    2787             number_of_selected_stations = data.shape[0]
    2788 
    2789             # Index where data ends and parameters begin
    2790             parameters_index = data.shape[1]-OFFSET         
    2791                  
    2792             quantity=num.zeros((number_of_selected_stations, parameters_index), num.float)
    2793            
    2794            
    2795             for i in range(number_of_selected_stations):
    2796        
    2797                 #print i, parameters_index
    2798                 #print quantity[i][:]
    2799                 if j == 0: assert num.allclose(data[i][:parameters_index], ha1[permutation[i], :])
    2800                 if j == 1: assert num.allclose(data[i][:parameters_index], ua1[permutation[i], :])
    2801                 if j == 2:
    2802                     # FIXME (Ole): This is where the output is wrong on Win32
    2803                    
    2804                     #print
    2805                     #print j, i
    2806                     #print 'Input'
    2807                     #print 'u', ua1[permutation[i], 8]       
    2808                     #print 'v', va1[permutation[i], 8]
    2809                
    2810                     #print 'Output'
    2811                     #print 'v ', data[i][:parameters_index][8] 
    2812 
    2813                     # South is positive in MUX
    2814                     #print "data[i][:parameters_index]", data[i][:parameters_index]
    2815                     #print "-va1[permutation[i], :]", -va1[permutation[i], :]
    2816                     assert num.allclose(data[i][:parameters_index], -va1[permutation[i], :])
    2817        
    2818         self.delete_mux(filesII)
    2819            
    2820     def test_read_mux_platform_problem3(self):
    2821        
    2822         # This is to test a situation where read_mux returned
    2823         # wrong values on Win32.
    2824 
    2825        
    2826         from urs_ext import read_mux2
    2827        
    2828         from anuga.config import single_precision as epsilon       
    2829        
    2830         verbose = False
    2831                
    2832         tide = 1.5
    2833         time_step_count = 10
    2834         time_step = 0.02
    2835 
    2836         '''
    2837         Win results
    2838         time_step = 0.2000001
    2839         This is OK       
    2840         '''
    2841        
    2842         '''
    2843         Win results
    2844         time_step = 0.20000001
    2845 
    2846         ======================================================================
    2847 ERROR: test_read_mux_platform_problem3 (__main__.Test_Data_Manager)
    2848 ----------------------------------------------------------------------
    2849 Traceback (most recent call last):
    2850   File "test_data_manager.py", line 6718, in test_read_mux_platform_problem3
    2851     ha1[0]=num.sin(times_ref)
    2852 ValueError: matrices are not aligned for copy
    2853 
    2854         '''
    2855 
    2856         '''
    2857         Win results
    2858         time_step = 0.200000001
    2859         FAIL
    2860          assert num.allclose(data[i][:parameters_index],
    2861          -va1[permutation[i], :])
    2862         '''
    2863         times_ref = num.arange(0, time_step_count*time_step, time_step)
    2864         #print "times_ref", times_ref
    2865        
    2866         lat_long_points = [(-21.5,114.5), (-21,114.5), (-21.5,115),
    2867                            (-21.,115.), (-22., 117.)]
    2868         stations = len(lat_long_points)
    2869        
    2870         # Create different timeseries starting and ending at different times
    2871         first_tstep=num.ones(stations, num.int)
    2872         first_tstep[0]+=2   # Point 0 starts at 2
    2873         first_tstep[1]+=4   # Point 1 starts at 4       
    2874         first_tstep[2]+=3   # Point 2 starts at 3
    2875        
    2876         last_tstep=(time_step_count)*num.ones(stations, num.int)
    2877         last_tstep[0]-=1    # Point 0 ends 1 step early
    2878         last_tstep[1]-=2    # Point 1 ends 2 steps early               
    2879         last_tstep[4]-=3    # Point 4 ends 3 steps early       
    2880        
    2881         # Create varying elevation data (positive values for seafloor)
    2882         gauge_depth=20*num.ones(stations, num.float)
    2883         for i in range(stations):
    2884             gauge_depth[i] += i**2
    2885            
        # Create data to be written to the second mux file
        ha1 = num.ones((stations, time_step_count), num.float)
        ha1[0] = num.sin(times_ref)
        ha1[1] = 2*num.sin(times_ref - 3)
        ha1[2] = 5*num.sin(4*times_ref)
        ha1[3] = num.sin(times_ref)
        ha1[4] = num.sin(2*times_ref - 0.7)

        ua1 = num.zeros((stations, time_step_count), num.float)
        ua1[0] = 3*num.cos(times_ref)
        ua1[1] = 2*num.sin(times_ref - 0.7)
        ua1[2] = num.arange(3*time_step_count, 4*time_step_count)
        ua1[4] = 2*num.ones(time_step_count)

        va1 = num.zeros((stations, time_step_count), num.float)
        va1[0] = 2*num.cos(times_ref - 0.87)
        va1[1] = 3*num.ones(time_step_count)
        va1[3] = 2*num.sin(times_ref - 0.71)
        #print "va1[0]", va1[0]  # The 8th element is what will go bad.

        # Ensure data used to write the mux file is zero when gauges are
        # not recording
        for i in range(stations):
            # For each point
            for j in range(0, first_tstep[i]-1) + range(last_tstep[i],
                                                        time_step_count):
                # For timesteps before and after the recording range
                ha1[i][j] = ua1[i][j] = va1[i][j] = 0.0
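
        # Note (an assumption about porting, not in the original code):
        # concatenating range() results as above is Python 2 only; a
        # hypothetical Python 3 version would need, e.g.
        #   list(range(0, first_tstep[i]-1)) + list(range(last_tstep[i],
        #                                                 time_step_count))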

        #print 'Second station to be written to MUX'
        #print 'ha', ha1[0,:]
        #print 'ua', ua1[0,:]
        #print 'va', va1[0,:]

        # Write second mux file to be combined by urs2sts
        base_nameII, filesII = self.write_mux2(lat_long_points,
                                               time_step_count, time_step,
                                               first_tstep, last_tstep,
                                               depth=gauge_depth,
                                               ha=ha1,
                                               ua=ua1,
                                               va=va1)
        #print "filesII", filesII

        # Read the mux file back and verify its correctness

        ####################################################
        # FIXME (Ole): This is where the test should
        # verify that the MUX files are correct.

        # JJ: It appears that certain quantities are not being stored with
        # enough precision in the mux file or, more likely, that they are
        # being cast to a lower precision when read back in using read_mux2.
        # Time step and q_time are equal, but only to approx 1e-7.
        ####################################################
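
        # One way to see the ~1e-7 figure (a sketch, not in the original
        # test): single precision has machine epsilon ~1.19e-7, so a float64
        # value round-tripped through float32 agrees with the original only
        # to about 7 significant digits.
        #
        #   eps32 = num.finfo(num.float32).eps        # ~1.19209e-07
        #   t64 = time_step_count*time_step
        #   t32 = num.float32(t64)
        #   assert abs(t64 - t32)/t64 <= eps32        # relative rounding bound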

        # Define the information as it should be stored in the mux2 files
        points_num = len(lat_long_points)
        depth = gauge_depth
        ha = ha1
        ua = ua1
        va = va1

        quantities = ['HA', 'UA', 'VA']
        mux_names = [WAVEHEIGHT_MUX2_LABEL,
                     EAST_VELOCITY_MUX2_LABEL,
                     NORTH_VELOCITY_MUX2_LABEL]
        quantities_init = [[], [], []]
        latlondeps = []

        # Irrelevant header information
        ig = ilon = ilat = 0
        mcolat = mcolon = centerlat = centerlon = offset = az = baz = id = 0.0

        # URS binary is latitude fastest
        for i, point in enumerate(lat_long_points):
            lat = point[0]
            lon = point[1]
            _, e, n = redfearn(lat, lon)
            if depth is None:
                this_depth = n
            else:
                this_depth = depth[i]
            latlondeps.append([lat, lon, this_depth])

            if ha is None:
                this_ha = e
                quantities_init[0].append(num.ones(time_step_count,
                                                   num.float)*this_ha) # HA
            else:
                quantities_init[0].append(ha[i])
            if ua is None:
                this_ua = n
                quantities_init[1].append(num.ones(time_step_count,
                                                   num.float)*this_ua) # UA
            else:
                quantities_init[1].append(ua[i])
            if va is None:
                this_va = e
                quantities_init[2].append(num.ones(time_step_count,
                                                   num.float)*this_va) # VA
            else:
                quantities_init[2].append(va[i])

        for i, q in enumerate(quantities):
            #print i, q

            q_time = num.zeros((time_step_count, points_num), num.float)
            quantities_init[i] = ensure_numeric(quantities_init[i])
            for time in range(time_step_count):
                #print i, q, time, quantities_init[i][:,time]
                q_time[time,:] = quantities_init[i][:,time]
                #print i, q, time, q_time[time, :]

            filename = base_nameII + mux_names[i]
            f = open(filename, 'rb')
            if self.verbose: print 'Reading ' + filename
            assert abs(points_num-unpack('i',f.read(4))[0]) < epsilon

            # Verify the mux2 header record for each station
            for latlondep in latlondeps:
                assert abs(latlondep[0]-unpack('f',f.read(4))[0]) < epsilon
                assert abs(latlondep[1]-unpack('f',f.read(4))[0]) < epsilon
                assert abs(mcolat-unpack('f',f.read(4))[0]) < epsilon
                assert abs(mcolon-unpack('f',f.read(4))[0]) < epsilon
                assert abs(ig-unpack('i',f.read(4))[0]) < epsilon
                assert abs(ilon-unpack('i',f.read(4))[0]) < epsilon
                assert abs(ilat-unpack('i',f.read(4))[0]) < epsilon
                assert abs(latlondep[2]-unpack('f',f.read(4))[0]) < epsilon
                assert abs(centerlat-unpack('f',f.read(4))[0]) < epsilon
                assert abs(centerlon-unpack('f',f.read(4))[0]) < epsilon
                assert abs(offset-unpack('f',f.read(4))[0]) < epsilon
                assert abs(az-unpack('f',f.read(4))[0]) < epsilon
                assert abs(baz-unpack('f',f.read(4))[0]) < epsilon

                x = unpack('f', f.read(4))[0]
                #print time_step, x
                assert abs(time_step-x) < epsilon
                assert abs(time_step_count-unpack('i',f.read(4))[0]) < epsilon
                for j in range(4): # identifier
                    assert abs(id-unpack('i',f.read(4))[0]) < epsilon

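            # For reference (inferred from the reads above, not documented in
            # the original test): each per-station header record unpacks as
            # 4 x float32 (lat, lon, mcolat, mcolon), 3 x int32 (ig, ilon,
            # ilat), 7 x float32 (depth, centerlat, centerlon, offset, az,
            # baz, dt), 1 x int32 (nt) and 4 x int32 (id), so it could also
            # be read in one call with a hypothetical format string:
            #   fmt = '<4f3i7fi4i'    # assumes little-endian byte order
            #   record = unpack(fmt, f.read(calcsize(fmt)))
            # (calcsize would need to be imported from struct alongside
            # unpack.)
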
            # Per-station first and last recorded timestep
            # (k avoids clobbering the quantity index i)
            for k, latlondep in enumerate(latlondeps):
                assert abs(first_tstep[k]-unpack('i',f.read(4))[0]) < epsilon
            for k, latlondep in enumerate(latlondeps):
                assert abs(last_tstep[k]-unpack('i',f.read(4))[0]) < epsilon

            # Find when the first station starts recording
            min_tstep = min(first_tstep)
            # Find when all stations have stopped recording
            max_tstep = max(last_tstep)

            # Data section: for each recorded timestep, a time stamp followed
            # by one value per station recording at that step
            for time in range(min_tstep-1, max_tstep):
                assert abs(time*time_step-unpack('f',f.read(4))[0]) < epsilon
                for point_i in range(points_num):
                    if first_tstep[point_i] <= time+1 <= last_tstep[point_i]:
                        x = unpack('f',f.read(4))[0]
                        #print time, x, q_time[time, point_i]
                        if q == 'VA': x = -x # South is positive in MUX
                        #print q+" q_time[%d, %d] = %f" % (time, point_i,
                        #                                  q_time[time, point_i])
                        assert abs(q_time[time, point_i]-x) < epsilon

            f.close()

        permutation = ensure_numeric([4, 0, 2])

        # Create ordering file (not needed here since the permutation is
        # passed directly to read_mux2; kept for reference)
#         _, ordering_filename = tempfile.mkstemp('')
#         order_fid = open(ordering_filename, 'w')
#         order_fid.write('index, longitude, latitude\n')
#         for index in permutation:
#             order_fid.write('%d, %f, %f\n' %(index,
#                                              lat_long_points[index][1],
#                                              lat_long_points[index][0]))
#         order_fid.close()

        # -------------------------------------
        # Now read the files back and check values
        weights = ensure_numeric([1.0])

        # For each quantity, read the source mux2 file whose extension is
        # associated with that quantity
        file_params = -1*num.ones(3, num.float) # [nsta, dt, nt]
        OFFSET = 5

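        # Layout assumed by the checks below: read_mux2 returns one row per
        # selected station, with the time series first and OFFSET trailing
        # station parameters, so each row splits as
        #   series = data[i][:parameters_index]
        #   params = data[i][parameters_index:]
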
        for j, filename in enumerate(filesII):
            # Read stage (j=0), u (j=1) and v (j=2)
            #print 'Reading', j, filename
            data = read_mux2(1, [filename], weights, file_params,
                             permutation, verbose)
            #print str(j) + " data", data

            #print 'Data received by Python'
            #print data[1][8]
            number_of_selected_stations = data.shape[0]
            #print "number_of_selected_stations", number_of_selected_stations
            #print "stations", stations

            # Index where data ends and parameters begin
            parameters_index = data.shape[1] - OFFSET

            for i in range(number_of_selected_stations):
                #print i, parameters_index
                if j == 0:
                    assert num.allclose(data[i][:parameters_index],
                                        ha1[permutation[i], :])
                if j == 1:
                    assert num.allclose(data[i][:parameters_index],
                                        ua1[permutation[i], :])
                if j == 2:
                    # South is positive in MUX, so VA changes sign
                    assert num.allclose(data[i][:parameters_index],
                                        -va1[permutation[i], :])

        self.delete_mux(filesII)
        os.remove(root + '_100.dem')

    def test_urs2sts0(self):