Changeset 7776


Timestamp: Jun 3, 2010, 6:03:07 PM
Author: hudson
Message: Removed redundant data_manager class. Unit tests are running, but may fail.
Location: trunk/anuga_core/source/anuga
Files: 3 added, 1 deleted, 19 edited

Legend:

  (no marker)  Unmodified
  +            Added
  -            Removed
  • trunk/anuga_core/source/anuga/__init__.py

    r7775 → r7776

      from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular_cross

    - from anuga.shallow_water.data_manager import export_grid
    - from anuga.shallow_water.data_manager import csv2building_polygons
    + from anuga.file.csv_file import csv2building_polygons

      from anuga.file.sts import create_sts_boundary

    …

      from anuga.file_conversion.dem2pts import dem2pts
      from anuga.file_conversion.esri2sww import esri2sww
    - from anuga.file_conversion.sww2dem import sww2dem
    + from anuga.file_conversion.sww2dem import sww2dem, sww2dem_batch
      from anuga.file_conversion.asc2dem import asc2dem
      from anuga.file_conversion.ferret2sww import ferret2sww
    + from anuga.file_conversion.dem2dem import dem2dem

      #-----------------------------
  • trunk/anuga_core/source/anuga/abstract_2d_finite_volumes/test_gauge.py

    r7735 → r7776

      from anuga.pmesh.mesh import Mesh
      from anuga.shallow_water import Domain, Transmissive_boundary
    - from anuga.shallow_water.sww_file import SWW_file
    - from anuga.shallow_water.file_conversion import timefile2netcdf
    + from anuga.file.sww import SWW_file
    + from anuga.file_conversion.file_conversion import timefile2netcdf
      from anuga.utilities.file_utils import del_dir
      from csv import reader,writer
  • trunk/anuga_core/source/anuga/abstract_2d_finite_volumes/test_util.py

    r7737 → r7776

      from anuga.config import epsilon
      from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
    - from anuga.shallow_water.file_conversion import timefile2netcdf
    + from anuga.file_conversion.file_conversion import timefile2netcdf
      from anuga.utilities.file_utils import del_dir

    …

      from anuga.pmesh.mesh import Mesh
      from anuga.shallow_water import Domain, Transmissive_boundary
    - from anuga.shallow_water.sww_file import SWW_file
    + from anuga.file.sww import SWW_file
      from csv import reader,writer
      import time
  • trunk/anuga_core/source/anuga/damage_modelling/test_inundation_damage.py

    r7743 → r7776

      from anuga.shallow_water import Domain, Transmissive_boundary
      from anuga.utilities.numerical_tools import mean
    - from anuga.shallow_water.sww_file import SWW_file
    + from anuga.file.sww import SWW_file

      import numpy as num
  • trunk/anuga_core/source/anuga/file/__init__.py

    r7760 → r7776

    - """Make directory available as a Python package
    + """Common file modules.
    +
    +     Here you will find modules to load, save, and extract information in a
    +     variety of different file formats.
    +
    + This module takes care of reading and writing datafiles such as topographies,
    + model output, etc.
    +
    +
    + Formats used within AnuGA:
    +
    + .sww: NetCDF format for storing model output f(t,x,y)
    + .tms: NetCDF format for storing time series f(t)
    +
    + .csv: ASCII format for storing arbitrary points and associated attributes
    + .pts: NetCDF format for storing arbitrary points and associated attributes
    +
    + .asc: ASCII format of regular DEMs as output from ArcView
    + .prj: Associated ArcView file giving more meta data for asc format
    + .ers: ERMapper header format of regular DEMs for ArcView
    +
    + .dem: NetCDF representation of regular DEM data
    +
    + .tsh: ASCII format for storing meshes and associated boundary and region info
    + .msh: NetCDF format for storing meshes and associated boundary and region info
    +
    + .nc: Native Ferret NetCDF format
    +
    +
    + A typical dataflow can be described as follows:
    +
    + Manually created files:
    + ASC, PRJ:     Digital elevation models (gridded)
    + TSH:          Triangular meshes (e.g. created from anuga.pmesh)
    + NC:           Model outputs for use as boundary conditions (e.g. from MOST)
    +
    +
    + Automatically created files:
    +
    + ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEMs to native pts file
    +
    + NC -> SWW: Conversion of MOST boundary files to boundary sww
    +
    + PTS + TSH -> TSH with elevation: Least squares fit
    +
    + TSH -> SWW: Conversion of TSH to sww viewable using Swollen
    +
    + TSH + Boundary SWW -> SWW: Simulation using abstract_2d_finite_volumes
      """
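
    For orientation, a minimal sketch of the ASC/PRJ -> DEM -> PTS dataflow described in the
    docstring above, using the asc2dem and dem2pts converters exposed by anuga.file_conversion.
    The file name 'topography.asc' is made up, and the exact keyword arguments of the two
    functions are assumed rather than confirmed by this changeset:

        from anuga.file_conversion.asc2dem import asc2dem
        from anuga.file_conversion.dem2pts import dem2pts

        # 'topography.asc' (with its companion 'topography.prj') is a gridded DEM
        # exported from ArcView; asc2dem is assumed to write 'topography.dem'.
        asc2dem('topography.asc', verbose=True)

        # dem2pts is assumed to turn the regular DEM into the native points file
        # 'topography.pts', ready for least-squares fitting onto a mesh.
        dem2pts('topography.dem', verbose=True)
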
  • trunk/anuga_core/source/anuga/file/csv_file.py

    r7772 → r7776

          A set of functions which extend the capabilities of the Python csv
          module.
    +
    +     CSV files have the extension .csv, which stands for Comma Separated Value
    +     file. There is no standardised form for this format, so the user is provided
    +     with a variety of options for parsing different styles of csv files.

          These have been left as functions to aviod confusion with the standard

    …

              log.critical(msg)

    +
    +
    + def csv2building_polygons(file_name,
    +                           floor_height=3,
    +                           clipping_polygons=None):
    +     """
    +     Convert CSV files of the form:
    +
    +     easting,northing,id,floors
    +     422664.22,870785.46,2,0
    +     422672.48,870780.14,2,0
    +     422668.17,870772.62,2,0
    +     422660.35,870777.17,2,0
    +     422664.22,870785.46,2,0
    +     422661.30,871215.06,3,1
    +     422667.50,871215.70,3,1
    +     422668.30,871204.86,3,1
    +     422662.21,871204.33,3,1
    +     422661.30,871215.06,3,1
    +
    +     to a dictionary of polygons with id as key.
    +     The associated number of floors are converted to m above MSL and
    +     returned as a separate dictionary also keyed by id.
    +
    +     Optional parameter floor_height is the height of each building story.
    +     Optional parameter clipping_polygons is a list of polygons selecting
    +     buildings. Any building not in these polygons will be omitted.
    +
    +     See csv2polygons for more details
    +     """
    +
    +     polygons, values = csv2polygons(file_name,
    +                                     value_name='floors',
    +                                     clipping_polygons=clipping_polygons)
    +
    +     heights = {}
    +     for key in values.keys():
    +         v = float(values[key])
    +         heights[key] = v*floor_height
    +
    +     return polygons, heights
    +
    +
    + ##
    + # @brief Convert CSV file into a dictionary of polygons and associated values.
    + # @param filename The path to the file to read, value_name name for the 4th column
    + def csv2polygons(file_name,
    +                  value_name='value',
    +                  clipping_polygons=None):
    +     """
    +     Convert CSV files of the form:
    +
    +     easting,northing,id,value
    +     422664.22,870785.46,2,0
    +     422672.48,870780.14,2,0
    +     422668.17,870772.62,2,0
    +     422660.35,870777.17,2,0
    +     422664.22,870785.46,2,0
    +     422661.30,871215.06,3,1
    +     422667.50,871215.70,3,1
    +     422668.30,871204.86,3,1
    +     422662.21,871204.33,3,1
    +     422661.30,871215.06,3,1
    +
    +     to a dictionary of polygons with id as key.
    +     The associated values are returned as a separate dictionary also keyed by id.
    +
    +     easting: x coordinate relative to zone implied by the model
    +     northing: y coordinate relative to zone implied by the model
    +     id: tag for polygon comprising points with this tag
    +     value: numeral associated with each polygon. These must be the same for all points in each polygon.
    +
    +     The last header, value, can take on other names such as roughness, floors, etc - or it can be omitted
    +     in which case the returned values will be None
    +
    +     Eastings and Northings will be returned as floating point values while
    +     id and values will be returned as strings.
    +
    +     Optional argument: clipping_polygons will select only those polygons that are
    +     fully within one or more of the clipping_polygons. In other words any polygon from
    +     the csv file which has at least one point not inside one of the clipping polygons
    +     will be excluded
    +
    +     See underlying function load_csv_as_dict for more details.
    +     """
    +
    +     X, _ = load_csv_as_dict(file_name)
    +
    +     msg = 'Polygon csv file must have 3 or 4 columns'
    +     assert len(X.keys()) in [3, 4], msg
    +
    +     msg = 'Did not find expected column header: easting'
    +     assert 'easting' in X.keys(), msg
    +
    +     msg = 'Did not find expected column header: northing'
    +     assert 'northing' in X.keys(), msg
    +
    +     msg = 'Did not find expected column header: id'
    +     assert 'id' in X.keys(), msg
    +
    +     if value_name is not None:
    +         msg = 'Did not find expected column header: %s' % value_name
    +         assert value_name in X.keys(), msg
    +
    +     polygons = {}
    +     if len(X.keys()) == 4:
    +         values = {}
    +     else:
    +         values = None
    +
    +     # Loop through entries and compose polygons
    +     excluded_polygons = {}
    +     past_ids = {}
    +     last_id = None
    +     for i, id in enumerate(X['id']):
    +
    +         # Check for duplicate polygons
    +         if id in past_ids:
    +             msg = 'Polygon %s was duplicated in line %d' % (id, i)
    +             raise Exception, msg
    +
    +         if id not in polygons:
    +             # Start new polygon
    +             polygons[id] = []
    +             if values is not None:
    +                 values[id] = X[value_name][i]
    +
    +             # Keep track of previous polygon ids
    +             if last_id is not None:
    +                 past_ids[last_id] = i
    +
    +         # Append this point to current polygon
    +         point = [float(X['easting'][i]), float(X['northing'][i])]
    +
    +         if clipping_polygons is not None:
    +             exclude = True
    +             for clipping_polygon in clipping_polygons:
    +                 if inside_polygon(point, clipping_polygon):
    +                     exclude = False
    +                     break
    +
    +             if exclude is True:
    +                 excluded_polygons[id] = True
    +
    +         polygons[id].append(point)
    +
    +         # Check that value is the same across each polygon
    +         if values is not None:
    +             msg = 'Values must be the same across each polygon. '
    +             msg += 'I got %s in line %d but it should have been %s' % (X[value_name][i], i, values[id])
    +             assert values[id] == X[value_name][i], msg
    +
    +         last_id = id
    +
    +     # Weed out polygons that were not wholly inside clipping polygons
    +     for id in excluded_polygons:
    +         del polygons[id]
    +
    +     return polygons, values
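
    For context, a minimal usage sketch of the relocated csv2building_polygons. The file name
    'buildings.csv' is hypothetical and is assumed to be laid out exactly as in the docstring
    above (easting, northing, id, floors); the 3 m floor height just keeps the default:

        from anuga.file.csv_file import csv2building_polygons

        # polygons maps each building id to a list of [easting, northing] vertices;
        # heights holds floors * floor_height (metres) keyed by the same ids.
        polygons, heights = csv2building_polygons('buildings.csv', floor_height=3)

        for building_id in polygons:
            print('building %s: %d vertices, %.1f m high'
                  % (building_id, len(polygons[building_id]), heights[building_id]))
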
  • trunk/anuga_core/source/anuga/file/sww.py

    r7770 → r7776

      # @brief A class to write an SWW file.
      class Write_sww:
    -     from anuga.shallow_water.shallow_water_domain import Domain
    -
    +
          RANGE = '_range'
          EXTREMA = ':extrema'
  • trunk/anuga_core/source/anuga/file_conversion/__init__.py

    r7765 → r7776

    - """Make directory available as a Python package
    + """ Modules for performing conversions between file types, or for
    +     resampling a given file.
      """
  • trunk/anuga_core/source/anuga/file_conversion/ferret2sww.py

    r7743 → r7776

      # ANUGA modules
      from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_float
    - from anuga.shallow_water.sww_file import Write_sww
    + from anuga.file.sww import Write_sww
      from anuga.coordinate_transforms.geo_reference import Geo_reference, \
           write_NetCDF_georeference
  • trunk/anuga_core/source/anuga/file_conversion/file_conversion.py

    r7770 → r7776

      #shallow water imports
    - from anuga.shallow_water.sww_file import Read_sww, Write_sww
    + from anuga.file.sww import Read_sww, Write_sww
      from anuga.shallow_water.shallow_water_domain import Domain
      from anuga.shallow_water.shallow_water_domain import Domain
  • trunk/anuga_core/source/anuga/file_conversion/sww2dem.py

    r7744 → r7776

          return basename_out
    +
    +
    +
    +
    + ##
    + # @brief
    + # @param basename_in
    + # @param extra_name_out
    + # @param quantities
    + # @param timestep
    + # @param reduction
    + # @param cellsize
    + # @param number_of_decimal_places
    + # @param NODATA_value
    + # @param easting_min
    + # @param easting_max
    + # @param northing_min
    + # @param northing_max
    + # @param verbose
    + # @param origin
    + # @param datum
    + # @param format
    + # @return
    + def sww2dem_batch(basename_in, extra_name_out=None,
    +                 quantities=None, # defaults to elevation
    +                 reduction=None,
    +                 cellsize=10,
    +                 number_of_decimal_places=None,
    +                 NODATA_value=-9999,
    +                 easting_min=None,
    +                 easting_max=None,
    +                 northing_min=None,
    +                 northing_max=None,
    +                 verbose=False,
    +                 origin=None,
    +                 datum='WGS84',
    +                 format='ers'):
    +     """Wrapper for sww2dem.
    +     See sww2dem to find out what most of the parameters do.
    +
    +     Quantities is a list of quantities.  Each quantity will be
    +     calculated for each sww file.
    +
    +     Returns the names of the files produced: their basenames, made up of
    +     the dir and the file name, without the extension.
    +
    +     It will produce as many output files as there are input sww files
    +     (one per quantity per sww file).
    +     """
    +
    +     if quantities is None:
    +         quantities = ['elevation']
    +
    +     if type(quantities) is str:
    +         quantities = [quantities]
    +
    +     # How many sww files are there?
    +     dir, base = os.path.split(basename_in)
    +
    +     iterate_over = get_all_swwfiles(dir, base, verbose)
    +
    +     if dir == "":
    +         dir = "." # Unix compatibility
    +
    +     files_out = []
    +     for sww_file in iterate_over:
    +         for quantity in quantities:
    +             if extra_name_out is None:
    +                 basename_out = sww_file + '_' + quantity
    +             else:
    +                 basename_out = sww_file + '_' + quantity + '_' + extra_name_out
    +
    +             file_out = sww2dem(dir+sep+sww_file, dir+sep+basename_out,
    +                                quantity,
    +                                reduction,
    +                                cellsize,
    +                                number_of_decimal_places,
    +                                NODATA_value,
    +                                easting_min,
    +                                easting_max,
    +                                northing_min,
    +                                northing_max,
    +                                verbose,
    +                                origin,
    +                                datum,
    +                                format)
    +             files_out.append(file_out)
    +     return files_out
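
    For reference, a minimal usage sketch of the new sww2dem_batch wrapper. The directory
    'output' and base name 'runup' are hypothetical; sww files matching that base name are
    assumed to exist there, and the remaining keyword arguments keep the defaults shown above:

        from anuga.file_conversion.sww2dem import sww2dem_batch

        # Writes one 'ers' grid per matching sww file for the requested quantity,
        # next to the input files, and returns the basenames that were produced.
        basenames = sww2dem_batch('output/runup',
                                  quantities=['stage'],
                                  cellsize=10,
                                  verbose=True)
        print(basenames)
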
  • trunk/anuga_core/source/anuga/file_conversion/test_2pts.py

    r7758 → r7776

      from anuga.shallow_water.shallow_water_domain import Domain
      from anuga.coordinate_transforms.geo_reference import Geo_reference
    - from anuga.shallow_water.sww_file import SWW_file
    + from anuga.file.sww import Write_sww, SWW_file
      from anuga.abstract_2d_finite_volumes.generic_boundary_conditions \
                                  import Transmissive_boundary
  • trunk/anuga_core/source/anuga/file_conversion/test_file_conversion.py

    r7770 → r7776

      from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
      from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular
    - from anuga.shallow_water.sww_file import SWW_file
    + from anuga.file.sww import SWW_file
      from anuga.file.sww import extent_sww
      from anuga.config import netcdf_float, epsilon, g

    …

      from anuga.file_conversion.file_conversion import tsh2sww, \
                              pmesh_to_domain_instance
    +

      from anuga.file.mux import WAVEHEIGHT_MUX_LABEL, EAST_VELOCITY_LABEL, \
  • trunk/anuga_core/source/anuga/file_conversion/test_sww2dem.py

    r7744 → r7776

           DEFAULT_ZONE

    - from anuga.shallow_water.sww_file import SWW_file
    + from anuga.file.sww import SWW_file

      # boundary functions
  • trunk/anuga_core/source/anuga/file_conversion/urs2nc.py

    r7765 → r7776

    +
    + ##
    + # @brief
    + # @param long_lat_dep
    + # @return A tuple (long, lat, quantity).
    + # @note The latitude is the fastest varying dimension - in mux files.
    + def lon_lat2grid(long_lat_dep):
    +     """
    +     Given a list of points that are assumed to be on a grid,
    +     return the longs and lats of the grid.
    +     long_lat_dep is an array where each row is a position.
    +     The first column is longitudes.
    +     The second column is latitudes.
    +
    +     The latitude is the fastest varying dimension - in mux files
    +     """
    +
    +     LONG = 0
    +     LAT = 1
    +     QUANTITY = 2
    +
    +     long_lat_dep = ensure_numeric(long_lat_dep, num.float)
    +
    +     num_points = long_lat_dep.shape[0]
    +     this_rows_long = long_lat_dep[0,LONG]
    +
    +     # Count the length of unique latitudes
    +     i = 0
    +     while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
    +         i += 1
    +
    +     # Determine the lats and longs from the grid
    +     lat = long_lat_dep[:i, LAT]
    +     long = long_lat_dep[::i, LONG]
    +
    +     lenlong = len(long)
    +     lenlat = len(lat)
    +
    +     msg = 'Input data is not gridded'
    +     assert num_points % lenlat == 0, msg
    +     assert num_points % lenlong == 0, msg
    +
    +     # Test that data is gridded
    +     for i in range(lenlong):
    +         msg = 'Data is not gridded.  It must be for this operation'
    +         first = i * lenlat
    +         last = first + lenlat
    +
    +         assert num.allclose(long_lat_dep[first:last,LAT], lat), msg
    +         assert num.allclose(long_lat_dep[first:last,LONG], long[i]), msg
    +
    +     msg = 'Out of range latitudes/longitudes'
    +     for l in lat: assert -90 < l < 90, msg
    +     for l in long: assert -180 < l < 180, msg
    +
    +     # Changing quantity from lat being the fastest varying dimension to
    +     # long being the fastest varying dimension
    +     # FIXME - make this faster/do this a better way
    +     # use numeric transpose, after reshaping the quantity vector
    +     quantity = num.zeros(num_points, num.float)
    +
    +     for lat_i, _ in enumerate(lat):
    +         for long_i, _ in enumerate(long):
    +             q_index = lat_i*lenlong + long_i
    +             lld_index = long_i*lenlat + lat_i
    +             temp = long_lat_dep[lld_index, QUANTITY]
    +             quantity[q_index] = temp
    +
    +     return long, lat, quantity
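
    A small sketch of what the new lon_lat2grid does with mux-style input, where latitude is
    the fastest varying dimension. The 2 x 3 grid and the depth values below are made up:

        import numpy as num
        from anuga.file_conversion.urs2nc import lon_lat2grid

        # Each row is (long, lat, depth), with latitude varying fastest as in mux files.
        long_lat_dep = num.array([[150.0, -35.0, 1.0],
                                  [150.0, -34.0, 2.0],
                                  [150.0, -33.0, 3.0],
                                  [151.0, -35.0, 4.0],
                                  [151.0, -34.0, 5.0],
                                  [151.0, -33.0, 6.0]])

        longs, lats, depth = lon_lat2grid(long_lat_dep)

        # longs -> [150. 151.], lats -> [-35. -34. -33.];
        # depth is reordered so longitude varies fastest: [1. 4. 2. 5. 3. 6.]
        print(depth)
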
  • trunk/anuga_core/source/anuga/file_conversion/urs2sww.py

    r7770 → r7776

                                  netcdf_float

    - from sww_file import Read_sww, Write_sww
    + from anuga.file.sww import Write_sww
  • trunk/anuga_core/source/anuga/fit_interpolate/test_interpolate.py

    r7737 → r7776

      from anuga.shallow_water import Domain, Transmissive_boundary
      from anuga.utilities.numerical_tools import mean, NAN
    - from anuga.shallow_water.sww_file import SWW_file
    + from anuga.file.sww import SWW_file
      from anuga.shallow_water.shallow_water_domain import Domain
      from abstract_2d_finite_volumes.quantity import Quantity
  • trunk/anuga_core/source/anuga/shallow_water/shallow_water_domain.py

    r7736 → r7776

      from anuga.utilities.numerical_tools import gradient, mean, ensure_numeric
      from anuga.geospatial_data.geospatial_data import ensure_geospatial
    -
    + from anuga.file.sww import SWW_file
      from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a

    …

              Also, save x,y and bed elevation
              """
    -
    -         from anuga.shallow_water.sww_file import SWW_file

              # Initialise writer
  • trunk/anuga_core/source/anuga/shallow_water/test_data_manager.py

    r7772 → r7776

      #

    - # This file was reverted from changeset:5484 to changeset:5470 on 10th July
    - # by Ole.
    + """
    + Set of tests for the now-defunct data manager module.
    +
    + These could be split up into their correct modules.
    + """

      import unittest
      import copy
      import numpy as num
    + import sys

      import tempfile

    …

      from Scientific.IO.NetCDF import NetCDFFile

    +
      from anuga.anuga_exceptions import ANUGAError
    - from anuga.shallow_water.data_manager import *
    - from anuga.shallow_water.sww_file import SWW_file
    + from anuga.file.sww import SWW_file
      from anuga.coordinate_transforms.geo_reference import Geo_reference
      from anuga.coordinate_transforms.redfearn import degminsec2decimal_degrees
      from anuga.abstract_2d_finite_volumes.util import file_function
    - from anuga.utilities.system_tools import get_pathname_from_package
    + from anuga.utilities.system_tools import get_pathname_from_package, \
    +                                             get_revision_number
      from anuga.utilities.file_utils import del_dir
      from anuga.utilities.numerical_tools import ensure_numeric, mean

    …

      # This is needed to run the tests of local functions
    - import data_manager
      from anuga.file_conversion.urs2sts import urs2sts
      from anuga.coordinate_transforms.redfearn import redfearn

    …

           DEFAULT_ZONE
      from anuga.geospatial_data.geospatial_data import Geospatial_data
    +
    + from shallow_water_domain import Domain

      # use helper methods from other unit test