Changeset 7735


Timestamp: May 20, 2010, 12:36:35 PM
Author: James Hudson
Message: Split up some of the huge modules in shallow_water, fixed most of the unit test dependencies.

Location: anuga_core/source/anuga
Files: 2 added, 5 deleted, 15 edited
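
In practice the split means callers stop importing everything from the catch-all data_manager module and pull helpers from the new, smaller modules instead. A hedged before/after sketch, assembled only from the import changes visible in the diffs below:

    # Before r7735: one catch-all module
    from anuga.shallow_water.data_manager import SWW_file, timefile2netcdf, del_dir
    from anuga.shallow_water.shallow_water_domain import Inflow, General_forcing

    # From r7735: focused modules
    from anuga.shallow_water.sww_file import SWW_file
    from anuga.shallow_water.file_conversion import timefile2netcdf
    from anuga.utilities.file_utils import del_dir
    from anuga.shallow_water.forcing import Inflow, General_forcing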

  • anuga_core/source/anuga/abstract_2d_finite_volumes/test_gauge.py

    r7693 → r7735

    @@ -10,5 +10,4 @@
     from anuga.abstract_2d_finite_volumes.gauge import *
     from anuga.config import epsilon
    -from anuga.shallow_water.data_manager import timefile2netcdf,del_dir
     
     from anuga.utilities.numerical_tools import NAN,mean
    @@ -18,5 +17,7 @@
     from anuga.pmesh.mesh import Mesh
     from anuga.shallow_water import Domain, Transmissive_boundary
    -from anuga.shallow_water.data_manager import SWW_file
    +from anuga.shallow_water.sww_file import SWW_file
    +from anuga.shallow_water.file_conversion import timefile2netcdf
    +from anuga.utilities.file_utils import del_dir
     from csv import reader,writer
     import time
  • anuga_core/source/anuga/abstract_2d_finite_volumes/test_generic_boundary_conditions.py

    r7573 → r7735

    @@ -404,5 +404,5 @@
     
             #Convert ASCII file to NetCDF (Which is what we really like!)
    -        from anuga.shallow_water.data_manager import timefile2netcdf
    +        from anuga.shallow_water.file_conversion import timefile2netcdf
     
             timefile2netcdf(filename, quantity_names = ['stage', 'xmomentum'])
  • anuga_core/source/anuga/abstract_2d_finite_volumes/test_util.py

    r7695 → r7735

    @@ -9,5 +9,7 @@
     from anuga.abstract_2d_finite_volumes.util import *
     from anuga.config import epsilon
    -from anuga.shallow_water.data_manager import timefile2netcdf,del_dir
    +from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
    +from anuga.shallow_water.file_conversion import timefile2netcdf
    +from anuga.utilities.file_utils import del_dir
     
     from anuga.utilities.numerical_tools import NAN
    @@ -17,5 +19,5 @@
     from anuga.pmesh.mesh import Mesh
     from anuga.shallow_water import Domain, Transmissive_boundary
    -from anuga.shallow_water.data_manager import SWW_file
    +from anuga.shallow_water.sww_file import SWW_file
     from csv import reader,writer
     import time
  • anuga_core/source/anuga/abstract_2d_finite_volumes/util.py

    r7711 → r7735

    @@ -650,6 +650,6 @@
     
         from os import sep
    -    from anuga.shallow_water.data_manager import \
    -                               get_all_files_with_extension, csv2dict
    +    from anuga.utilities.file_utils import load_csv_as_dict, \
    +                               get_all_files_with_extension
     
         seconds_in_hour = 3600
    @@ -955,7 +955,6 @@
         """
     
    -    from anuga.shallow_water.data_manager import get_all_directories_with_name,\
    -                                                 get_maximum_inundation_data,\
    -                                                 csv2dict
    +    from anuga.shallow_water.data_manager import \
    +        get_maximum_inundation_data, csv2dict
     
         file = open(runup_filename, "w")
  • anuga_core/source/anuga/culvert_flows/culvert_class.py

    r7711 → r7735

    @@ -1,5 +1,5 @@
     import sys
     
    -from anuga.shallow_water.shallow_water_domain import Inflow, General_forcing
    +from anuga.shallow_water.forcing import Inflow, General_forcing
     from anuga.culvert_flows.culvert_polygons import create_culvert_polygons
     from anuga.utilities.system_tools import log_to_file
  • anuga_core/source/anuga/culvert_flows/new_culvert_class.py

    r7711 → r7735

    @@ -1,5 +1,5 @@
     import sys
     
    -from anuga.shallow_water.shallow_water_domain import Inflow, General_forcing
    +from anuga.shallow_water.forcing import Inflow, General_forcing
     from anuga.culvert_flows.culvert_polygons import create_culvert_polygons
     from anuga.utilities.system_tools import log_to_file
  • anuga_core/source/anuga/damage_modelling/test_inundation_damage.py

    r7342 → r7735

    @@ -15,5 +15,5 @@
     from anuga.shallow_water import Domain, Transmissive_boundary
     from anuga.utilities.numerical_tools import mean
    -from anuga.shallow_water.data_manager import SWW_file
    +from anuga.shallow_water.sww_file import SWW_file
     
     import numpy as num
    @@ -293,5 +293,6 @@
             fd = open(csv_file,'wb')
             writer = csv.writer(fd)
    -        writer.writerow(['x','y',STR_VALUE_LABEL,CONT_VALUE_LABEL,'ROOF_TYPE',WALL_TYPE_LABEL, SHORE_DIST_LABEL])
    +        writer.writerow(['x', 'y', STR_VALUE_LABEL, CONT_VALUE_LABEL, \
    +        'ROOF_TYPE', WALL_TYPE_LABEL, SHORE_DIST_LABEL])
             writer.writerow([5.5,0.5,'10','130000','Metal','Timber',20])
             writer.writerow([4.5,1.0,'150','76000','Metal','Double Brick',20])
  • anuga_core/source/anuga/fit_interpolate/test_interpolate.py

    r7722 → r7735

    @@ -24,5 +24,5 @@
     from anuga.shallow_water import Domain, Transmissive_boundary
     from anuga.utilities.numerical_tools import mean, NAN
    -from anuga.shallow_water.data_manager import SWW_file
    +from anuga.shallow_water.sww_file import SWW_file
     from anuga.geospatial_data.geospatial_data import Geospatial_data
     from anuga.pmesh.mesh import Mesh
  • anuga_core/source/anuga/shallow_water/boundaries.py

    r7732 → r7735

    @@ -188,5 +188,6 @@
     
         def __init__(self, domain=None, function=None):
    -        """ Instantiate a Transmissive_n_momentum_zero_t_momentum_set_stage_boundary.
    +        """ Instantiate a
    +            Transmissive_n_momentum_zero_t_momentum_set_stage_boundary.
             domain is the domain containing the boundary
             function is the function to apply
    @@ -209,5 +210,7 @@
         def __repr__(self):
             """ Return a representation of this instance. """
    -        return 'Transmissive_n_momentum_zero_t_momentum_set_stage_boundary(%s)' %self.domain
    +        msg='Transmissive_n_momentum_zero_t_momentum_set_stage_boundary'
    +        msg+='(%s)' %self.domain
    +        return msg
     
     
    @@ -393,5 +396,6 @@
     
             # Compute depth based on Manning's formula v = 1/n h^{2/3} sqrt(S)
    -        # Where v is velocity, n is manning's coefficient, h is depth and S is the slope into the domain.
    +        # Where v is velocity, n is manning's coefficient, h is depth
    +        # and S is the slope into the domain.
             # Let mu be the momentum (vh), then this equation becomes: mu = 1/n h^{5/3} sqrt(S)
             # from which we can isolate depth to get
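
    The comment being rewrapped here carries a small derivation: from Manning's formula v = 1/n h^{2/3} sqrt(S) and mu = v*h, it follows that mu = 1/n h^{5/3} sqrt(S), so h = (n*mu/sqrt(S))^{3/5}. A minimal illustrative sketch of that relationship (not code from this changeset; names are hypothetical):

        from math import sqrt

        def manning_depth(mu, n, S):
            """Depth h recovered from momentum mu = v*h via Manning's formula.

            v = 1/n * h**(2/3) * sqrt(S)  =>  mu = 1/n * h**(5/3) * sqrt(S)
            =>  h = (n*mu / sqrt(S))**(3/5)
            """
            return (n * mu / sqrt(S)) ** 0.6  # 0.6 == 3/5

        # Example: momentum 0.5 m^2/s, Manning's n 0.03, slope 0.01
        h = manning_depth(0.5, 0.03, 0.01)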
  • anuga_core/source/anuga/shallow_water/data_manager.py

    r7711 → r7735

    @@ -94,4 +94,8 @@
     import anuga.utilities.log as log
     
    +from anuga.utilities.file_utils import create_filename,\
    +                        get_all_swwfiles, load_csv_as_dict
    +from sww_file import Read_sww, Write_sww
    +
     
     # Default block size for sww2dem()
     
    @@ -118,748 +122,4 @@
                         'speed': \
      '(xmomentum**2 + ymomentum**2)**0.5/(stage-elevation+1.e-6/(stage-elevation))'}
    -
    -
    -##
    -# @brief Convert a possible filename into a standard form.
    -# @param s Filename to process.
    -# @return The new filename string.
    -def make_filename(s):
    -    """Transform argument string into a suitable filename
    -    """
    -
    -    s = s.strip()
    -    s = s.replace(' ', '_')
    -    s = s.replace('(', '')
    -    s = s.replace(')', '')
    -    s = s.replace('__', '_')
    -
    -    return s
    -
    -
    -##
    -# @brief Check that a specified filesystem directory path exists.
    -# @param path The directory path to check.
    -# @param verbose True if this function is to be verbose.
    -# @note If directory path doesn't exist, it will be created.
    -def check_dir(path, verbose=None):
    -    """Check that specified path exists.
    -    If path does not exist it will be created if possible
    -
    -    USAGE:
    -       checkdir(path, verbose):
    -
    -    ARGUMENTS:
    -        path -- Directory
    -        verbose -- Flag verbose output (default: None)
    -
    -    RETURN VALUE:
    -        Verified path including trailing separator
    -    """
    -
    -    import os.path
    -
    -    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
    -        unix = 0
    -    else:
    -        unix = 1
    -
    -    # add terminal separator, if it's not already there
    -    if path[-1] != os.sep:
    -        path = path + os.sep
    -
    -    # expand ~ or ~username in path
    -    path = os.path.expanduser(path)
    -
    -    # create directory if required
    -    if not (os.access(path, os.R_OK and os.W_OK) or path == ''):
    -        try:
    -            exitcode = os.mkdir(path)
    -
    -            # Change access rights if possible
    -            if unix:
    -                exitcode = os.system('chmod 775 ' + path)
    -            else:
    -                pass  # FIXME: What about access rights under Windows?
    -
    -            if verbose: log.critical('MESSAGE: Directory %s created.' % path)
    -        except:
    -            log.critical('WARNING: Directory %s could not be created.' % path)
    -            if unix:
    -                path = '/tmp/'
    -            else:
    -                path = 'C:' + os.sep
    -
    -            log.critical("Using directory '%s' instead" % path)
    -
    -    return path
    -
    -
    -##
    -# @brief Delete directory and all sub-directories.
    -# @param path Path to the directory to delete.
    -def del_dir(path):
    -    """Recursively delete directory path and all its contents
    -    """
    -
    -    if os.path.isdir(path):
    -        for file in os.listdir(path):
    -            X = os.path.join(path, file)
    -
    -            if os.path.isdir(X) and not os.path.islink(X):
    -                del_dir(X)
    -            else:
    -                try:
    -                    os.remove(X)
    -                except:
    -                    log.critical("Could not remove file %s" % X)
    -
    -        os.rmdir(path)
    -
    -
    -##
    -# @brief ??
    -# @param path
    -# @param __func__
    -# @param verbose True if this function is to be verbose.
    -# @note ANOTHER OPTION, IF NEED IN THE FUTURE, Nick B 7/2007
    -def rmgeneric(path, func, verbose=False):
    -    ERROR_STR= """Error removing %(path)s, %(error)s """
    -
    -    try:
    -        func(path)
    -        if verbose: log.critical('Removed %s' % path)
    -    except OSError, (errno, strerror):
    -        log.critical(ERROR_STR % {'path' : path, 'error': strerror })
    -
    -
    -##
    -# @brief Remove directory and all sub-directories.
    -# @param path Filesystem path to directory to remove.
    -# @param verbose True if this function is to be verbose.
    -def removeall(path, verbose=False):
    -    if not os.path.isdir(path):
    -        return
    -
    -    for x in os.listdir(path):
    -        fullpath = os.path.join(path, x)
    -        if os.path.isfile(fullpath):
    -            f = os.remove
    -            rmgeneric(fullpath, f)
    -        elif os.path.isdir(fullpath):
    -            removeall(fullpath)
    -            f = os.rmdir
    -            rmgeneric(fullpath, f, verbose)
    -
    -
    -##
    -# @brief Create a standard filename.
    -# @param datadir Directory where file is to be created.
    -# @param filename Filename 'stem'.
    -# @param format Format of the file, becomes filename extension.
    -# @param size Size of file, becomes part of filename.
    -# @param time Time (float), becomes part of filename.
    -# @return The complete filename path, including directory.
    -# @note The containing directory is created, if necessary.
    -def create_filename(datadir, filename, format, size=None, time=None):
    -    FN = check_dir(datadir) + filename
    -
    -    if size is not None:
    -        FN += '_size%d' % size
    -
    -    if time is not None:
    -        FN += '_time%.2f' % time
    -
    -    FN += '.' + format
    -
    -    return FN
    -
    -
    -##
    -# @brief Get all files with a standard name and a given set of attributes.
    -# @param datadir Directory files must be in.
    -# @param filename Filename stem.
    -# @param format Filename extension.
    -# @param size Filename size.
    -# @return A list of filenames (including directory) that match the attributes.
    -def get_files(datadir, filename, format, size):
    -    """Get all file (names) with given name, size and format
    -    """
    -
    -    import glob
    -
    -    dir = check_dir(datadir)
    -    pattern = dir + os.sep + filename + '_size=%d*.%s' % (size, format)
    -
    -    return glob.glob(pattern)
    -
    -
    -##
    -# @brief Generic class for storing output to e.g. visualisation or checkpointing
    -class Data_format:
    -    """Generic interface to data formats
    -    """
    -
    -    ##
    -    # @brief Instantiate this instance.
    -    # @param domain
    -    # @param extension
    -    # @param mode The mode of the underlying file.
    -    def __init__(self, domain, extension, mode=netcdf_mode_w):
    -        assert mode[0] in ['r', 'w', 'a'], \
    -               "Mode %s must be either:\n" % mode + \
    -               "   'w' (write)\n" + \
    -               "   'r' (read)\n" + \
    -               "   'a' (append)"
    -
    -        #Create filename
    -        self.filename = create_filename(domain.get_datadir(),
    -                                        domain.get_name(), extension)
    -
    -        self.timestep = 0
    -        self.domain = domain
    -
    -        # Exclude ghosts in case this is a parallel domain
    -        self.number_of_nodes = domain.number_of_full_nodes
    -        self.number_of_volumes = domain.number_of_full_triangles
    -        #self.number_of_volumes = len(domain)
    -
    -        #FIXME: Should we have a general set_precision function?
    -
    -
    -##
    -# @brief Class for storing output to e.g. visualisation
    -class SWW_file(Data_format):
    -    """Interface to native NetCDF format (.sww) for storing model output
    -
    -    There are two kinds of data
    -
    -    1: Constant data: Vertex coordinates and field values. Stored once
    -    2: Variable data: Conserved quantities. Stored once per timestep.
    -
    -    All data is assumed to reside at vertex locations.
    -    """
    -
    -    ##
    -    # @brief Instantiate this instance.
    -    # @param domain ??
    -    # @param mode Mode of the underlying data file.
    -    # @param max_size ??
    -    # @param recursion ??
    -    # @note Prepare the underlying data file if mode starts with 'w'.
    -    def __init__(self, domain,
    -                 mode=netcdf_mode_w, max_size=2000000000, recursion=False):
    -        from Scientific.IO.NetCDF import NetCDFFile
    -
    -        self.precision = netcdf_float32 # Use single precision for quantities
    -        self.recursion = recursion
    -        self.mode = mode
    -        if hasattr(domain, 'max_size'):
    -            self.max_size = domain.max_size # File size max is 2Gig
    -        else:
    -            self.max_size = max_size
    -        if hasattr(domain, 'minimum_storable_height'):
    -            self.minimum_storable_height = domain.minimum_storable_height
    -        else:
    -            self.minimum_storable_height = default_minimum_storable_height
    -
    -        # Call parent constructor
    -        Data_format.__init__(self, domain, 'sww', mode)
    -
    -        # Get static and dynamic quantities from domain
    -        static_quantities = []
    -        dynamic_quantities = []
    -
    -        for q in domain.quantities_to_be_stored:
    -            flag = domain.quantities_to_be_stored[q]
    -
    -            msg = 'Quantity %s is requested to be stored ' % q
    -            msg += 'but it does not exist in domain.quantities'
    -            assert q in domain.quantities, msg
    -
    -            assert flag in [1,2]
    -            if flag == 1: static_quantities.append(q)
    -            if flag == 2: dynamic_quantities.append(q)
    -
    -
    -        # NetCDF file definition
    -        fid = NetCDFFile(self.filename, mode)
    -        if mode[0] == 'w':
    -            description = 'Output from anuga.abstract_2d_finite_volumes ' \
    -                          'suitable for plotting'
    -
    -            self.writer = Write_sww(static_quantities, dynamic_quantities)
    -            self.writer.store_header(fid,
    -                                     domain.starttime,
    -                                     self.number_of_volumes,
    -                                     self.domain.number_of_full_nodes,
    -                                     description=description,
    -                                     smoothing=domain.smooth,
    -                                     order=domain.default_order,
    -                                     sww_precision=self.precision)
    -
    -            # Extra optional information
    -            if hasattr(domain, 'texture'):
    -                fid.texture = domain.texture
    -
    -            if domain.quantities_to_be_monitored is not None:
    -                fid.createDimension('singleton', 1)
    -                fid.createDimension('two', 2)
    -
    -                poly = domain.monitor_polygon
    -                if poly is not None:
    -                    N = len(poly)
    -                    fid.createDimension('polygon_length', N)
    -                    fid.createVariable('extrema.polygon',
    -                                       self.precision,
    -                                       ('polygon_length', 'two'))
    -                    fid.variables['extrema.polygon'][:] = poly
    -
    -                interval = domain.monitor_time_interval
    -                if interval is not None:
    -                    fid.createVariable('extrema.time_interval',
    -                                       self.precision,
    -                                       ('two',))
    -                    fid.variables['extrema.time_interval'][:] = interval
    -
    -                for q in domain.quantities_to_be_monitored:
    -                    fid.createVariable(q + '.extrema', self.precision,
    -                                       ('numbers_in_range',))
    -                    fid.createVariable(q + '.min_location', self.precision,
    -                                       ('numbers_in_range',))
    -                    fid.createVariable(q + '.max_location', self.precision,
    -                                       ('numbers_in_range',))
    -                    fid.createVariable(q + '.min_time', self.precision,
    -                                       ('singleton',))
    -                    fid.createVariable(q + '.max_time', self.precision,
    -                                       ('singleton',))
    -
    -        fid.close()
    -
    -    ##
    -    # @brief Store connectivity data into the underlying data file.
    -    def store_connectivity(self):
    -        """Store information about nodes, triangles and static quantities
    -
    -        Writes x,y coordinates of triangles and their connectivity.
    -
    -        Store also any quantity that has been identified as static.
    -        """
    -
    -        # FIXME: Change name to reflect the fact that this function
    -        # stores both connectivity (triangulation) and static quantities
    -
    -        from Scientific.IO.NetCDF import NetCDFFile
    -
    -        domain = self.domain
    -
    -        # append to the NetCDF file
    -        fid = NetCDFFile(self.filename, netcdf_mode_a)
    -
    -        # Get X, Y from one (any) of the quantities
    -        Q = domain.quantities.values()[0]
    -        X,Y,_,V = Q.get_vertex_values(xy=True, precision=self.precision)
    -
    -        # store the connectivity data
    -        points = num.concatenate((X[:,num.newaxis],Y[:,num.newaxis]), axis=1)
    -        self.writer.store_triangulation(fid,
    -                                        points,
    -                                        V.astype(num.float32),
    -                                        points_georeference=\
    -                                            domain.geo_reference)
    -
    -
    -        # Get names of static quantities
    -        static_quantities = {}
    -        for name in self.writer.static_quantities:
    -            Q = domain.quantities[name]
    -            A, _ = Q.get_vertex_values(xy=False,
    -                                       precision=self.precision)
    -            static_quantities[name] = A
    -
    -        # Store static quantities
    -        self.writer.store_static_quantities(fid, **static_quantities)
    -
    -        fid.close()
    -
    -    ##
    -    # @brief Store time and time dependent quantities
    -    # to the underlying data file.
    -    def store_timestep(self):
    -        """Store time and time dependent quantities
    -        """
    -
    -        from Scientific.IO.NetCDF import NetCDFFile
    -        import types
    -        from time import sleep
    -        from os import stat
    -
    -        # Get NetCDF
    -        retries = 0
    -        file_open = False
    -        while not file_open and retries < 10:
    -            try:
    -                # Open existing file
    -                fid = NetCDFFile(self.filename, netcdf_mode_a)
    -            except IOError:
    -                # This could happen if someone was reading the file.
    -                # In that case, wait a while and try again
    -                msg = 'Warning (store_timestep): File %s could not be opened' \
    -                      % self.filename
    -                msg += ' - trying step %s again' % self.domain.time
    -                log.critical(msg)
    -                retries += 1
    -                sleep(1)
    -            else:
    -                file_open = True
    -
    -        if not file_open:
    -            msg = 'File %s could not be opened for append' % self.filename
    -            raise DataFileNotOpenError, msg
    -
    -        # Check to see if the file is already too big:
    -        time = fid.variables['time']
    -        i = len(time) + 1
    -        file_size = stat(self.filename)[6]
    -        file_size_increase = file_size / i
    -        if file_size + file_size_increase > self.max_size * 2**self.recursion:
    -            # In order to get the file name and start time correct,
    -            # I change the domain.filename and domain.starttime.
    -            # This is the only way to do this without changing
    -            # other modules (I think).
    -
    -            # Write a filename addon that won't break the anuga viewers
    -            # (10.sww is bad)
    -            filename_ext = '_time_%s' % self.domain.time
    -            filename_ext = filename_ext.replace('.', '_')
    -
    -            # Remember the old filename, then give domain a
    -            # name with the extension
    -            old_domain_filename = self.domain.get_name()
    -            if not self.recursion:
    -                self.domain.set_name(old_domain_filename + filename_ext)
    -
    -            # Temporarily change the domain starttime to the current time
    -            old_domain_starttime = self.domain.starttime
    -            self.domain.starttime = self.domain.get_time()
    -
    -            # Build a new data_structure.
    -            next_data_structure = SWW_file(self.domain, mode=self.mode,
    -                                           max_size=self.max_size,
    -                                           recursion=self.recursion+1)
    -            if not self.recursion:
    -                log.critical('    file_size = %s' % file_size)
    -                log.critical('    saving file to %s'
    -                             % next_data_structure.filename)
    -
    -            # Set up the new data_structure
    -            self.domain.writer = next_data_structure
    -
    -            # Store connectivity and first timestep
    -            next_data_structure.store_connectivity()
    -            next_data_structure.store_timestep()
    -            fid.sync()
    -            fid.close()
    -
    -            # Restore the old starttime and filename
    -            self.domain.starttime = old_domain_starttime
    -            self.domain.set_name(old_domain_filename)
    -        else:
    -            self.recursion = False
    -            domain = self.domain
    -
    -            # Get the variables
    -            time = fid.variables['time']
    -            i = len(time)
    -
    -            if 'stage' in self.writer.dynamic_quantities:
    -                # Select only those values for stage,
    -                # xmomentum and ymomentum (if stored) where
    -                # depth exceeds minimum_storable_height
    -                #
    -                # In this branch it is assumed that elevation
    -                # is also available as a quantity
    -
    -                Q = domain.quantities['stage']
    -                w, _ = Q.get_vertex_values(xy=False)
    -
    -                Q = domain.quantities['elevation']
    -                z, _ = Q.get_vertex_values(xy=False)
    -
    -                storable_indices = (w-z >= self.minimum_storable_height)
    -            else:
    -                # Very unlikely branch
    -                storable_indices = None # This means take all
    -
    -
    -            # Now store dynamic quantities
    -            dynamic_quantities = {}
    -            for name in self.writer.dynamic_quantities:
    -                netcdf_array = fid.variables[name]
    -
    -                Q = domain.quantities[name]
    -                A, _ = Q.get_vertex_values(xy=False,
    -                                           precision=self.precision)
    -
    -                if storable_indices is not None:
    -                    if name == 'stage':
    -                        A = num.choose(storable_indices, (z, A))
    -
    -                    if name in ['xmomentum', 'ymomentum']:
    -                        # Get xmomentum where depth exceeds
    -                        # minimum_storable_height
    -
    -                        # Define a zero vector of same size and type as A
    -                        # for use with momenta
    -                        null = num.zeros(num.size(A), A.dtype.char)
    -                        A = num.choose(storable_indices, (null, A))
    -
    -                dynamic_quantities[name] = A
    -
    -
    -            # Store dynamic quantities
    -            self.writer.store_quantities(fid,
    -                                         time=self.domain.time,
    -                                         sww_precision=self.precision,
    -                                         **dynamic_quantities)
    -
    -
    -            # Update extrema if requested
    -            domain = self.domain
    -            if domain.quantities_to_be_monitored is not None:
    -                for q, info in domain.quantities_to_be_monitored.items():
    -                    if info['min'] is not None:
    -                        fid.variables[q + '.extrema'][0] = info['min']
    -                        fid.variables[q + '.min_location'][:] = \
    -                                        info['min_location']
    -                        fid.variables[q + '.min_time'][0] = info['min_time']
    -
    -                    if info['max'] is not None:
    -                        fid.variables[q + '.extrema'][1] = info['max']
    -                        fid.variables[q + '.max_location'][:] = \
    -                                        info['max_location']
    -                        fid.variables[q + '.max_time'][0] = info['max_time']
    -
    -            # Flush and close
    -            fid.sync()
    -            fid.close()
    -
    -
    -##
    -# @brief Class to open an sww file so that domain can be populated with quantity values
    -class Read_sww:
    -
    -    def __init__(self, source):
    -        """The source parameter is assumed to be a NetCDF sww file.
    -        """
    -
    -        self.source = source
    -
    -        self.frame_number = 0
    -
    -        fin = NetCDFFile(self.source, 'r')
    -
    -        self.time = num.array(fin.variables['time'], num.float)
    -        self.last_frame_number = self.time.shape[0] - 1
    -
    -        self.frames = num.arange(self.last_frame_number+1)
    -
    -        fin.close()
    -
    -        self.read_mesh()
    -
    -        self.quantities = {}
    -
    -        self.read_quantities()
    -
    -
    -    def read_mesh(self):
    -        fin = NetCDFFile(self.source, 'r')
    -
    -        self.vertices = num.array(fin.variables['volumes'], num.int)
    -
    -        self.x = x = num.array(fin.variables['x'], num.float)
    -        self.y = y = num.array(fin.variables['y'], num.float)
    -
    -        assert len(self.x) == len(self.y)
    -
    -        self.xmin = num.min(x)
    -        self.xmax = num.max(x)
    -        self.ymin = num.min(y)
    -        self.ymax = num.max(y)
    -
    -
    -
    -        fin.close()
    -
    -    def read_quantities(self, frame_number=0):
    -
    -        assert frame_number >= 0 and frame_number <= self.last_frame_number
    -
    -        self.frame_number = frame_number
    -
    -        M = len(self.x)/3
    -
    -        fin = NetCDFFile(self.source, 'r')
    -
    -        for q in filter(lambda n:n != 'x' and n != 'y' and n != 'time' and n != 'volumes' and \
    -                        '_range' not in n, \
    -                        fin.variables.keys()):
    -            if len(fin.variables[q].shape) == 1: # Not a time-varying quantity
    -                self.quantities[q] = num.ravel(num.array(fin.variables[q], num.float)).reshape(M,3)
    -            else: # Time-varying, get the current timestep data
    -                self.quantities[q] = num.array(fin.variables[q][self.frame_number], num.float).reshape(M,3)
    -        fin.close()
    -        return self.quantities
    -
    -    def get_bounds(self):
    -        return [self.xmin, self.xmax, self.ymin, self.ymax]
    -
    -    def get_last_frame_number(self):
    -        return self.last_frame_number
    -
    -    def get_time(self):
    -        return self.time[self.frame_number]
    -
    -
    -##
    -# @brief Class for handling checkpoints data
    -# @note This is not operational at the moment
    -class CPT_file(Data_format):
    -    """Interface to native NetCDF format (.cpt) to be
    -    used for checkpointing (one day)
    -    """
    -
    -    ##
    -    # @brief Initialize this instantiation.
    -    # @param domain ??
    -    # @param mode Mode of underlying data file (default WRITE).
    -    def __init__(self, domain, mode=netcdf_mode_w):
    -        from Scientific.IO.NetCDF import NetCDFFile
    -
    -        self.precision = netcdf_float #Use full precision
    -
    -        Data_format.__init__(self, domain, 'sww', mode)
    -
    -        # NetCDF file definition
    -        fid = NetCDFFile(self.filename, mode)
    -        if mode[0] == 'w':
    -            # Create new file
    -            fid.institution = 'Geoscience Australia'
    -            fid.description = 'Checkpoint data'
    -            #fid.smooth = domain.smooth
    -            fid.order = domain.default_order
    -
    -            # Dimension definitions
    -            fid.createDimension('number_of_volumes', self.number_of_volumes)
    -            fid.createDimension('number_of_vertices', 3)
    -
    -            # Store info at all vertices (no smoothing)
    -            fid.createDimension('number_of_points', 3*self.number_of_volumes)
    -            fid.createDimension('number_of_timesteps', None) #extensible
    -
    -            # Variable definitions
    -
    -            # Mesh
    -            fid.createVariable('x', self.precision, ('number_of_points',))
    -            fid.createVariable('y', self.precision, ('number_of_points',))
    -
    -
    -            fid.createVariable('volumes', netcdf_int, ('number_of_volumes',
    -                                                       'number_of_vertices'))
    -
    -            fid.createVariable('time', self.precision, ('number_of_timesteps',))
    -
    -            #Allocate space for all quantities
    -            for name in domain.quantities.keys():
    -                fid.createVariable(name, self.precision,
    -                                   ('number_of_timesteps',
    -                                    'number_of_points'))
    -
    -        fid.close()
    -
    -    ##
    -    # @brief Store connectivity data to underlying data file.
    -    def store_checkpoint(self):
    -        """Write x,y coordinates of triangles.
    -        Write connectivity (
    -        constituting
    -        the bed elevation.
    -        """
    -
    -        from Scientific.IO.NetCDF import NetCDFFile
    -
    -        domain = self.domain
    -
    -        #Get NetCDF
    -        fid = NetCDFFile(self.filename, netcdf_mode_a)
    -
    -        # Get the variables
    -        x = fid.variables['x']
    -        y = fid.variables['y']
    -
    -        volumes = fid.variables['volumes']
    -
    -        # Get X, Y and bed elevation Z
    -        Q = domain.quantities['elevation']
    -        X,Y,Z,V = Q.get_vertex_values(xy=True, precision=self.precision)
    -
    -        x[:] = X.astype(self.precision)
    -        y[:] = Y.astype(self.precision)
    -        z[:] = Z.astype(self.precision)
    -
    -        volumes[:] = V
    -
    -        fid.close()
    -
    -    ##
    -    # @brief Store time and named quantities to underlying data file.
    -    # @param name
    -    def store_timestep(self, name):
    -        """Store time and named quantity to file
    -        """
    -
    -        from Scientific.IO.NetCDF import NetCDFFile
    -        from time import sleep
    -
    -        #Get NetCDF
    -        retries = 0
    -        file_open = False
    -        while not file_open and retries < 10:
    -            try:
    -                fid = NetCDFFile(self.filename, netcdf_mode_a)
    -            except IOError:
    -                #This could happen if someone was reading the file.
    -                #In that case, wait a while and try again
    -                msg = 'Warning (store_timestep): File %s could not be opened' \
    -                      ' - trying again' % self.filename
    -                log.critical(msg)
    -                retries += 1
    -                sleep(1)
    -            else:
    -                file_open = True
    -
    -        if not file_open:
    -            msg = 'File %s could not be opened for append' % self.filename
    -            raise DataFileNotOpenError, msg
    -
    -        domain = self.domain
    -
    -        # Get the variables
    -        time = fid.variables['time']
    -        stage = fid.variables['stage']
    -        i = len(time)
    -
    -        #Store stage
    -        time[i] = self.domain.time
    -
    -        # Get quantity
    -        Q = domain.quantities[name]
    -        A,V = Q.get_vertex_values(xy=False, precision=self.precision)
    -
    -        stage[i,:] = A.astype(self.precision)
    -
    -        #Flush and close
    -        fid.sync()
    -        fid.close()
     
     
     
    @@ -924,5 +184,6 @@
             #The values are the index positions of file columns.
             self._attribute_dic, self._title_index_dic = \
    -            csv2dict(self._file_name, title_check_list=title_check_list)
    +            load_csv_as_dict(self._file_name, \
    +            title_check_list=title_check_list)
             try:
                 #Have code here that handles caps or lower
     
    @@ -1186,8 +447,8 @@
         will be excluded
     
    -    See underlying function csv2dict for more details.
    -    """
    -
    -    X, _ = csv2dict(file_name)
    +    See underlying function load_csv_as_dict for more details.
    +    """
    +
    +    X, _ = load_csv_as_dict(file_name)
     
         msg = 'Polygon csv file must have 3 or 4 columns'
     
    @@ -1268,5 +529,5 @@
     # @brief Convert CSV file to a dictionary of arrays.
     # @param file_name The path to the file to read.
    -def csv2array(file_name):
    +def load_csv_as_array(file_name):
         """
         Convert CSV files of the form:
     
    @@ -1280,8 +541,8 @@
     
     
    -    See underlying function csv2dict for more details.
    -    """
    -
    -    X, _ = csv2dict(file_name)
    +    See underlying function load_csv_as_dict for more details.
    +    """
    +
    +    X, _ = load_csv_as_dict(file_name)
     
         Y = {}
     
    @@ -1291,60 +552,4 @@
         return Y
     
    -
    -##
    -# @brief Read a CSV file and convert to a dictionary of {key: column}.
    -# @param file_name The path to the file to read.
    -# @param title_check_list List of titles that *must* be columns in the file.
    -# @return Two dicts: ({key:column}, {title:index}).
    -# @note WARNING: Values are returned as strings.
    -def csv2dict(file_name, title_check_list=None):
    -    """
    -    Load in the csv as a dictionary, title as key and column info as value.
    -    Also, create a dictionary, title as key and column index as value,
    -    to keep track of the column order.
    -
    -    Two dictionaries are returned.
    -
    -    WARNING: Values are returned as strings.
    -             Do this to change a list of strings to a list of floats
    -                 time = [float(x) for x in time]
    -    """
    -
    -    # FIXME(Ole): Consider dealing with files without headers
    -    # FIXME(Ole): Consider a wrapper automatically converting text fields
    -    #             to the right type by trying for: int, float, string
    -
    -    attribute_dic = {}
    -    title_index_dic = {}
    -    titles_stripped = [] # List of titles
    -
    -    reader = csv.reader(file(file_name))
    -
    -    # Read in and manipulate the title info
    -    titles = reader.next()
    -    for i, title in enumerate(titles):
    -        header = title.strip()
    -        titles_stripped.append(header)
    -        title_index_dic[header] = i
    -    title_count = len(titles_stripped)
    -
    -    # Check required columns
    -    if title_check_list is not None:
    -        for title_check in title_check_list:
    -            if not title_index_dic.has_key(title_check):
    -                msg = 'Reading error. This row is not present %s' % title_check
    -                raise IOError, msg
    -
    -    # Create a dictionary of column values, indexed by column title
    -    for line in reader:
    -        n = len(line) # Number of entries
    -        if n != title_count:
    -            msg = 'Entry in file %s had %d columns ' % (file_name, n)
    -            msg += 'although there were %d headers' % title_count
    -            raise IOError, msg
    -        for i, value in enumerate(line):
    -            attribute_dic.setdefault(titles_stripped[i], []).append(value)
    -
    -    return attribute_dic, title_index_dic
     
     
     
    @@ -1390,129 +595,4 @@
     
         outfile.close()
    -
    -
    -#########################################################
    -#Conversion routines
    -########################################################
    -
    -##
    -# @brief Convert SWW data to OBJ data.
    -# @param basefilename Stem of filename, needs size and extension added.
    -# @param size The number of lines to write.
    -def sww2obj(basefilename, size):
    -    """Convert netcdf based data output to obj
    -    """
    -
    -    from Scientific.IO.NetCDF import NetCDFFile
    -
    -    # Get NetCDF
    -    FN = create_filename('.', basefilename, 'sww', size)
    -    log.critical('Reading from %s' % FN)
    -    fid = NetCDFFile(FN, netcdf_mode_r)  #Open existing file for read
    -
    -    # Get the variables
    -    x = fid.variables['x']
    -    y = fid.variables['y']
    -    z = fid.variables['elevation']
    -    time = fid.variables['time']
    -    stage = fid.variables['stage']
    -
    -    M = size  #Number of lines
    -    xx = num.zeros((M,3), num.float)
    -    yy = num.zeros((M,3), num.float)
    -    zz = num.zeros((M,3), num.float)
    -
    -    for i in range(M):
    -        for j in range(3):
    -            xx[i,j] = x[i+j*M]
    -            yy[i,j] = y[i+j*M]
    -            zz[i,j] = z[i+j*M]
    -
    -    # Write obj for bathymetry
    -    FN = create_filename('.', basefilename, 'obj', size)
    -    write_obj(FN,xx,yy,zz)
    -
    -    # Now read all the data with variable information, combine with
    -    # x,y info and store as obj
    -    for k in range(len(time)):
    -        t = time[k]
    -        log.critical('Processing timestep %f' % t)
    -
    -        for i in range(M):
    -            for j in range(3):
    -                zz[i,j] = stage[k,i+j*M]
    -
    -        #Write obj for variable data
    -        #FN = create_filename(basefilename, 'obj', size, time=t)
    -        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
    -        write_obj(FN, xx, yy, zz)
    -
    -
    -##
    -# @brief
    -# @param basefilename Stem of filename, needs size and extension added.
    -def dat2obj(basefilename):
    -    """Convert line based data output to obj
    -    FIXME: Obsolete?
    -    """
    -
    -    import glob, os
    -    from anuga.config import data_dir
    -
    -    # Get bathymetry and x,y's
    -    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()
    -
    -    M = len(lines)  #Number of lines
    -    x = num.zeros((M,3), num.float)
    -    y = num.zeros((M,3), num.float)
    -    z = num.zeros((M,3), num.float)
    -
    -    for i, line in enumerate(lines):
    -        tokens = line.split()
    -        values = map(float, tokens)
    -
    -        for j in range(3):
    -            x[i,j] = values[j*3]
    -            y[i,j] = values[j*3+1]
    -            z[i,j] = values[j*3+2]
    -
    -    # Write obj for bathymetry
    -    write_obj(data_dir + os.sep + basefilename + '_geometry', x, y, z)
    -
    -    # Now read all the data files with variable information, combine with
    -    # x,y info and store as obj.
    -
    -    files = glob.glob(data_dir + os.sep + basefilename + '*.dat')
    -    for filename in files:
    -        log.critical('Processing %s' % filename)
    -
    -        lines = open(data_dir + os.sep + filename, 'r').readlines()
    -        assert len(lines) == M
    -        root, ext = os.path.splitext(filename)
    -
    -        # Get time from filename
    -        i0 = filename.find('_time=')
    -        if i0 == -1:
    -            #Skip bathymetry file
    -            continue
    -
    -        i0 += 6  #Position where time starts
    -        i1 = filename.find('.dat')
    -
    -        if i1 > i0:
    -            t = float(filename[i0:i1])
    -        else:
    -            raise DataTimeError, 'Hmmmm'
    -
    -        for i, line in enumerate(lines):
    -            tokens = line.split()
    -            values = map(float,tokens)
    -
    -            for j in range(3):
    -                z[i,j] = values[j]
    -
    -        # Write obj for variable data
    -        write_obj(data_dir + os.sep + basefilename + '_time=%.4f' % t, x, y, z)
    -
     
     ##
     
    1573653    outfile.close()
    1574654
    1575 
    1576 ##
    1577 # @brief Convert DEM data  to PTS data.
    1578 # @param basename_in Stem of input filename.
    1579 # @param basename_out Stem of output filename.
    1580 # @param easting_min
    1581 # @param easting_max
    1582 # @param northing_min
    1583 # @param northing_max
    1584 # @param use_cache
    1585 # @param verbose
    1586 # @return
    1587 def dem2pts(basename_in, basename_out=None,
    1588             easting_min=None, easting_max=None,
    1589             northing_min=None, northing_max=None,
    1590             use_cache=False, verbose=False,):
    1591     """Read Digitial Elevation model from the following NetCDF format (.dem)
    1592 
    1593     Example:
    1594 
    1595     ncols         3121
    1596     nrows         1800
    1597     xllcorner     722000
    1598     yllcorner     5893000
    1599     cellsize      25
    1600     NODATA_value  -9999
    1601     138.3698 137.4194 136.5062 135.5558 ..........
    1602 
    1603     Convert to NetCDF pts format which is
    1604 
    1605     points:  (Nx2) float array
    1606     elevation: N float array
    1607     """
    1608 
    1609     kwargs = {'basename_out': basename_out,
    1610               'easting_min': easting_min,
    1611               'easting_max': easting_max,
    1612               'northing_min': northing_min,
    1613               'northing_max': northing_max,
    1614               'verbose': verbose}
    1615 
    1616     if use_cache is True:
    1617         from caching import cache
    1618         result = cache(_dem2pts, basename_in, kwargs,
    1619                        dependencies = [basename_in + '.dem'],
    1620                        verbose = verbose)
    1621 
    1622     else:
    1623         result = apply(_dem2pts, [basename_in], kwargs)
    1624 
    1625     return result
    1626 
    1627 
    1628 ##
    1629 # @brief
    1630 # @param basename_in
    1631 # @param basename_out
    1632 # @param verbose
    1633 # @param easting_min
    1634 # @param easting_max
    1635 # @param northing_min
    1636 # @param northing_max
    1637 def _dem2pts(basename_in, basename_out=None, verbose=False,
    1638             easting_min=None, easting_max=None,
    1639             northing_min=None, northing_max=None):
    1640     """Read Digitial Elevation model from the following NetCDF format (.dem)
    1641 
    1642     Internal function. See public function dem2pts for details.
    1643     """
    1644 
    1645     # FIXME: Can this be written feasibly using write_pts?
    1646 
    1647     import os
    1648     from Scientific.IO.NetCDF import NetCDFFile
    1649 
    1650     root = basename_in
    1651 
    1652     # Get NetCDF
    1653     infile = NetCDFFile(root + '.dem', netcdf_mode_r)
    1654 
    1655     if verbose: log.critical('Reading DEM from %s' % (root + '.dem'))
    1656 
    1657     ncols = infile.ncols[0]
    1658     nrows = infile.nrows[0]
    1659     xllcorner = infile.xllcorner[0]  # Easting of lower left corner
    1660     yllcorner = infile.yllcorner[0]  # Northing of lower left corner
    1661     cellsize = infile.cellsize[0]
    1662     NODATA_value = infile.NODATA_value[0]
    1663     dem_elevation = infile.variables['elevation']
    1664 
    1665     zone = infile.zone[0]
    1666     false_easting = infile.false_easting[0]
    1667     false_northing = infile.false_northing[0]
    1668 
    1669     # Text strings
    1670     projection = infile.projection
    1671     datum = infile.datum
    1672     units = infile.units
    1673 
    1674     # Get output file
    1675     if basename_out == None:
    1676         ptsname = root + '.pts'
    1677     else:
    1678         ptsname = basename_out + '.pts'
    1679 
    1680     if verbose: log.critical('Store to NetCDF file %s' % ptsname)
    1681 
    1682     # NetCDF file definition
    1683     outfile = NetCDFFile(ptsname, netcdf_mode_w)
    1684 
    1685     # Create new file
    1686     outfile.institution = 'Geoscience Australia'
    1687     outfile.description = 'NetCDF pts format for compact and portable ' \
    1688                           'storage of spatial point data'
    1689 
    1690     # Assign default values
    1691     if easting_min is None: easting_min = xllcorner
    1692     if easting_max is None: easting_max = xllcorner + ncols*cellsize
    1693     if northing_min is None: northing_min = yllcorner
    1694     if northing_max is None: northing_max = yllcorner + nrows*cellsize
    1695 
    1696     # Compute offsets to update georeferencing
    1697     easting_offset = xllcorner - easting_min
    1698     northing_offset = yllcorner - northing_min
    1699 
    1700     # Georeferencing
    1701     outfile.zone = zone
    1702     outfile.xllcorner = easting_min # Easting of lower left corner
    1703     outfile.yllcorner = northing_min # Northing of lower left corner
    1704     outfile.false_easting = false_easting
    1705     outfile.false_northing = false_northing
    1706 
    1707     outfile.projection = projection
    1708     outfile.datum = datum
    1709     outfile.units = units
    1710 
    1711     # Grid info (FIXME: probably not going to be used, but heck)
    1712     outfile.ncols = ncols
    1713     outfile.nrows = nrows
    1714 
    1715     dem_elevation_r = num.reshape(dem_elevation, (nrows, ncols))
    1716     totalnopoints = nrows*ncols
    1717 
    1718     # Calculating number of NODATA_values for each row in clipped region
    1719     # FIXME: use array operations to do faster
    1720     nn = 0
    1721     k = 0
    1722     i1_0 = 0
    1723     j1_0 = 0
    1724     thisj = 0
    1725     thisi = 0
    1726     for i in range(nrows):
    1727         y = (nrows-i-1)*cellsize + yllcorner
    1728         for j in range(ncols):
    1729             x = j*cellsize + xllcorner
    1730             if easting_min <= x <= easting_max \
    1731                and northing_min <= y <= northing_max:
    1732                 thisj = j
    1733                 thisi = i
    1734                 if dem_elevation_r[i,j] == NODATA_value:
    1735                     nn += 1
    1736 
    1737                 if k == 0:
    1738                     i1_0 = i
    1739                     j1_0 = j
    1740 
    1741                 k += 1
    1742 
    1743     index1 = j1_0
    1744     index2 = thisj
    1745 
    1746     # Dimension definitions
    1747     nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    1748     ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
    1749 
    1750     clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    1751     nopoints = clippednopoints-nn
    1752 
    1753     clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]
    1754 
    1755     if verbose:
    1756         log.critical('There are %d values in the elevation' % totalnopoints)
    1757         log.critical('There are %d values in the clipped elevation'
    1758                      % clippednopoints)
    1759         log.critical('There are %d NODATA_values in the clipped elevation' % nn)
    1760 
    1761     outfile.createDimension('number_of_points', nopoints)
    1762     outfile.createDimension('number_of_dimensions', 2) #This is 2d data
    1763 
    1764     # Variable definitions
    1765     outfile.createVariable('points', netcdf_float, ('number_of_points',
    1766                                                     'number_of_dimensions'))
    1767     outfile.createVariable('elevation', netcdf_float, ('number_of_points',))
    1768 
    1769     # Get handles to the variables
    1770     points = outfile.variables['points']
    1771     elevation = outfile.variables['elevation']
    1772 
    1773     lenv = index2-index1+1
    1774 
    1775     # Store data
    1776     global_index = 0
    1777     # for i in range(nrows):
    1778     for i in range(i1_0, thisi+1, 1):
    1779         if verbose and i % ((nrows+10)/10) == 0:
    1780             log.critical('Processing row %d of %d' % (i, nrows))
    1781 
    1782         lower_index = global_index
    1783 
    1784         v = dem_elevation_r[i,index1:index2+1]
    1785         no_NODATA = num.sum(v == NODATA_value)
    1786         if no_NODATA > 0:
    1787             newcols = lenv - no_NODATA  # ncols_in_bounding_box - no_NODATA
    1788         else:
    1789             newcols = lenv              # ncols_in_bounding_box
    1790 
    1791         telev = num.zeros(newcols, num.float)
    1792         tpoints = num.zeros((newcols, 2), num.float)
    1793 
    1794         local_index = 0
    1795 
    1796         y = (nrows-i-1)*cellsize + yllcorner
    1797         #for j in range(ncols):
    1798         for j in range(j1_0,index2+1,1):
    1799             x = j*cellsize + xllcorner
    1800             if easting_min <= x <= easting_max \
    1801                and northing_min <= y <= northing_max \
    1802                and dem_elevation_r[i,j] <> NODATA_value:
    1803                 tpoints[local_index, :] = [x-easting_min, y-northing_min]
    1804                 telev[local_index] = dem_elevation_r[i, j]
    1805                 global_index += 1
    1806                 local_index += 1
    1807 
    1808         upper_index = global_index
    1809 
    1810         if upper_index == lower_index + newcols:
    1811             points[lower_index:upper_index, :] = tpoints
    1812             elevation[lower_index:upper_index] = telev
    1813 
    1814     assert global_index == nopoints, 'index not equal to number of points'
    1815 
    1816     infile.close()
    1817     outfile.close()
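
The clipping loop above converts array indices to grid coordinates with
x = j*cellsize + xllcorner and y = (nrows-i-1)*cellsize + yllcorner, so row 0
of the array is the northernmost row of the grid. A minimal sketch of that
mapping (grid sizes and corner values below are illustrative only):

    # Illustrative index -> coordinate mapping used by the DEM clipper above
    nrows, ncols = 3, 4
    cellsize = 10.0
    xllcorner, yllcorner = 100.0, 200.0

    for i in range(nrows):        # i == 0 is the top (northernmost) row
        y = (nrows - i - 1)*cellsize + yllcorner
        for j in range(ncols):    # j == 0 is the left (westernmost) column
            x = j*cellsize + xllcorner
            # (i, j) == (0, 0) maps to (x, y) == (100.0, 220.0), the NW corner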
    1818655
    1819656
     
    34562293
    34572294    outfile.close()
    3458 
    3459 
    3460 ##
    3461 # @brief Convert time-series text file to TMS file.
    3462 # @param filename
    3463 # @param quantity_names
    3464 # @param time_as_seconds
    3465 def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False):
    3466     """Template for converting typical text files with time series to
    3467     NetCDF tms file.
    3468 
    3469     The file format is assumed to be either two fields separated by a comma:
    3470 
    3471         time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
    3472 
    3473     E.g.
    3474 
    3475       31/08/04 00:00:00, 1.328223 0 0
    3476       31/08/04 00:15:00, 1.292912 0 0
    3477 
    3478     or time (seconds), value0 value1 value2 ...
    3479 
    3480       0.0, 1.328223 0 0
    3481       0.1, 1.292912 0 0
    3482 
    3483     will provide a time-dependent function f(t) with three attributes.
    3484 
    3485     filename is assumed to be the rootname, with extensions .txt and .tms
    3486     """
    3487 
    3488     import time, calendar
    3489     from anuga.config import time_format
    3490     from anuga.utilities.numerical_tools import ensure_numeric
    3491 
    3492     file_text = filename + '.txt'
    3493     fid = open(file_text)
    3494     line = fid.readline()
    3495     fid.close()
    3496 
    3497     fields = line.split(',')
    3498     msg = "File %s must have the format 'datetime, value0 value1 value2 ...'" \
    3499           % file_text
    3500     assert len(fields) == 2, msg
    3501 
    3502     if not time_as_seconds:
    3503         try:
    3504             starttime = calendar.timegm(time.strptime(fields[0], time_format))
    3505         except ValueError:
    3506             msg = 'First field in file %s must be' % file_text
    3507             msg += ' date-time with format %s.\n' % time_format
    3508             msg += 'I got %s instead.' % fields[0]
    3509             raise DataTimeError, msg
    3510     else:
    3511         try:
    3512             starttime = float(fields[0])
    3513         except ValueError:
    3514             msg = 'Could not convert time value %s to seconds' % fields[0]
    3515             raise DataTimeError, msg
    3516 
    3517     # Split values
    3518     values = []
    3519     for value in fields[1].split():
    3520         values.append(float(value))
    3521 
    3522     q = ensure_numeric(values)
    3523 
    3524     msg = 'ERROR: File must contain at least one independent value'
    3525     assert len(q.shape) == 1, msg
    3526 
    3527     # Read times proper (time, calendar and time_format are imported above)
    3530 
    3531     fid = open(file_text)
    3532     lines = fid.readlines()
    3533     fid.close()
    3534 
    3535     N = len(lines)
    3536     d = len(q)
    3537 
    3538     T = num.zeros(N, num.float)       # Time
    3539     Q = num.zeros((N, d), num.float)  # Values
    3540 
    3541     for i, line in enumerate(lines):
    3542         fields = line.split(',')
    3543         if not time_as_seconds:
    3544             realtime = calendar.timegm(time.strptime(fields[0], time_format))
    3545         else:
    3546             realtime = float(fields[0])
    3547         T[i] = realtime - starttime
    3548 
    3549         for j, value in enumerate(fields[1].split()):
    3550             Q[i, j] = float(value)
    3551 
    3552     msg = 'File %s must list time as a monotonically ' % filename
    3553     msg += 'increasing sequence'
    3554     assert num.alltrue(T[1:] - T[:-1] > 0), msg
    3555 
    3556     #Create NetCDF file
    3557     from Scientific.IO.NetCDF import NetCDFFile
    3558 
    3559     fid = NetCDFFile(filename + '.tms', netcdf_mode_w)
    3560 
    3561     fid.institution = 'Geoscience Australia'
    3562     fid.description = 'Time series'
    3563 
    3564     #Reference point
    3565     #Start time in seconds since the epoch (midnight 1/1/1970)
    3566     #FIXME: Use Georef
    3567     fid.starttime = starttime
    3568 
    3569     # dimension definitions
    3570     #fid.createDimension('number_of_volumes', self.number_of_volumes)
    3571     #fid.createDimension('number_of_vertices', 3)
    3572 
    3573     fid.createDimension('number_of_timesteps', len(T))
    3574 
    3575     fid.createVariable('time', netcdf_float, ('number_of_timesteps',))
    3576 
    3577     fid.variables['time'][:] = T
    3578 
    3579     for i in range(Q.shape[1]):
    3580         try:
    3581             name = quantity_names[i]
    3582         except (TypeError, IndexError):
    3583             name = 'Attribute%d' % i
    3584 
    3585         fid.createVariable(name, netcdf_float, ('number_of_timesteps',))
    3586         fid.variables[name][:] = Q[:,i]
    3587 
    3588     fid.close()
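
A minimal usage sketch for timefile2netcdf (the file stem 'gauge' and the
quantity names are hypothetical; after this changeset the function lives in
anuga.shallow_water.file_conversion):

    from anuga.shallow_water.file_conversion import timefile2netcdf

    # Reads the hypothetical 'gauge.txt' in the comma-separated format
    # documented above and writes 'gauge.tms' next to it. Note that the
    # root name is passed without the '.txt' extension.
    timefile2netcdf('gauge', quantity_names=['stage', 'xmomentum', 'ymomentum'])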
    35892295
    35902296
     
    40542760
    40552761##
    4056 # @brief
    4057 # @param filename
    4058 # @param verbose
    4059 def tsh2sww(filename, verbose=False):
    4060     """Convert a tsh/msh mesh file to an sww file; mainly a quick
    4061     check that the mesh 'looks' good.
    4062     """
    4063 
    4064     if verbose: log.critical('Creating domain from %s' % filename)
    4065 
    4066     domain = pmesh_to_domain_instance(filename, Domain)
    4067 
    4068     if verbose: log.critical("Number of triangles = %s" % len(domain))
    4069 
    4070     domain.smooth = True
    4071     domain.format = 'sww'   #Native netcdf visualisation format
    4072     file_path, filename = path.split(filename)
    4073     filename, ext = path.splitext(filename)
    4074     domain.set_name(filename)
    4075     domain.reduction = mean
    4076 
    4077     if verbose: log.critical("file_path = %s" % file_path)
    4078 
    4079     if file_path == "":
    4080         file_path = "."
    4081     domain.set_datadir(file_path)
    4082 
    4083     if verbose:
    4084         log.critical("Output written to %s%s%s.%s"
    4085                      % (domain.get_datadir(), sep, domain.get_name(),
    4086                         domain.format))
    4087 
    4088     sww = SWW_file(domain)
    4089     sww.store_connectivity()
    4090     sww.store_timestep()
    4091 
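
A usage sketch for tsh2sww (the mesh file name is hypothetical; after this
changeset the function lives in anuga.shallow_water.file_conversion):

    from anuga.shallow_water.file_conversion import tsh2sww

    # Builds a domain from the hypothetical 'channel.tsh' and writes
    # 'channel.sww' into the same directory.
    tsh2sww('channel.tsh', verbose=True)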
    4092 
    4093 ##
    4094 # @brief Convert CSIRO ESRI file to an SWW boundary file.
    4095 # @param bath_dir
    4096 # @param elevation_dir
    4097 # @param ucur_dir
    4098 # @param vcur_dir
    4099 # @param sww_file
    4100 # @param minlat
    4101 # @param maxlat
    4102 # @param minlon
    4103 # @param maxlon
    4104 # @param zscale
    4105 # @param mean_stage
    4106 # @param fail_on_NaN
    4107 # @param elevation_NaN_filler
    4108 # @param bath_prefix
    4109 # @param elevation_prefix
    4110 # @param verbose
    4111 # @note Also convert latitude and longitude to UTM. All coordinates are
    4112 #       assumed to be given in the GDA94 datum.
    4113 def asc_csiro2sww(bath_dir,
    4114                   elevation_dir,
    4115                   ucur_dir,
    4116                   vcur_dir,
    4117                   sww_file,
    4118                   minlat=None, maxlat=None,
    4119                   minlon=None, maxlon=None,
    4120                   zscale=1,
    4121                   mean_stage=0,
    4122                   fail_on_NaN=True,
    4123                   elevation_NaN_filler=0,
    4124                   bath_prefix='ba',
    4125                   elevation_prefix='el',
    4126                   verbose=False):
    4127     """
    4128     Produce an sww boundary file from ESRI ASCII data from CSIRO.
    4129 
    4130     Also convert latitude and longitude to UTM. All coordinates are
    4131     assumed to be given in the GDA94 datum.
    4132 
    4133     Assume:
    4134     All files are in ESRI ASCII format.
    4135 
    4136     Four types of information:
    4137     bathymetry
    4138     elevation
    4139     u velocity
    4140     v velocity
    4141 
    4142     Assumptions:
    4143     The metadata of all the files is the same.
    4144     Each type is in a separate directory.
    4145     One bath file with extension .000.
    4146     The time period is less than 24 hrs and uniform.
    4147     """
    4148 
    4149     from Scientific.IO.NetCDF import NetCDFFile
    4150 
    4151     from anuga.coordinate_transforms.redfearn import redfearn
    4152 
    4153     precision = netcdf_float # So if we want to change the precision it's done here
    4154 
    4155     # Go into the bath dir and load the only file
    4156     bath_files = os.listdir(bath_dir)
    4157     bath_file = bath_files[0]
    4158     bath_dir_file = bath_dir + os.sep + bath_file
    4159     bath_metadata, bath_grid = _read_asc(bath_dir_file)
    4160 
    4161     #Use the date/time of the bath file as a basis for
    4162     #the start time for other files
    4163     base_start = bath_file[-12:]
    4164 
    4165     #go into the elevation dir and load the 000 file
    4166     elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
    4167                          + base_start
    4168 
    4169     elevation_files = os.listdir(elevation_dir)
    4170     ucur_files = os.listdir(ucur_dir)
    4171     vcur_files = os.listdir(vcur_dir)
    4172     elevation_files.sort()
    4173 
    4174     # the first elevation file should be the
    4175     # file with the same base name as the bath data
    4176     assert elevation_files[0] == 'el' + base_start
    4177 
    4178     number_of_latitudes = bath_grid.shape[0]
    4179     number_of_longitudes = bath_grid.shape[1]
    4180     number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2
    4181 
    4182     longitudes = [bath_metadata['xllcorner'] + x*bath_metadata['cellsize'] \
    4183                   for x in range(number_of_longitudes)]
    4184     latitudes = [bath_metadata['yllcorner'] + y*bath_metadata['cellsize'] \
    4185                  for y in range(number_of_latitudes)]
    4186 
    4187     # Reverse order of lat, so the first lat represents the first grid row
    4188     latitudes.reverse()
    4189 
    4190     kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
    4191                                                   minlat=minlat, maxlat=maxlat,
    4192                                                   minlon=minlon, maxlon=maxlon)
    4193 
    4194     bath_grid = bath_grid[kmin:kmax,lmin:lmax]
    4195     latitudes = latitudes[kmin:kmax]
    4196     longitudes = longitudes[lmin:lmax]
    4197     number_of_latitudes = len(latitudes)
    4198     number_of_longitudes = len(longitudes)
    4199     number_of_times = len(os.listdir(elevation_dir))
    4200     number_of_points = number_of_latitudes * number_of_longitudes
    4201     number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2
    4202 
    4203     #Work out the times
    4204     if len(elevation_files) > 1:
    4205         # Assume: The time period is less than 24hrs.
    4206         time_period = (int(elevation_files[1][-3:]) \
    4207                        - int(elevation_files[0][-3:])) * 60*60
    4208         times = [x*time_period for x in range(len(elevation_files))]
    4209     else:
    4210         times = [0.0]
    4211 
    4212     if verbose:
    4213         log.critical('------------------------------------------------')
    4214         log.critical('Statistics:')
    4215         log.critical('  Extent (lat/lon):')
    4216         log.critical('    lat in [%f, %f], len(lat) == %d'
    4217                      % (min(latitudes), max(latitudes), len(latitudes)))
    4218         log.critical('    lon in [%f, %f], len(lon) == %d'
    4219                      % (min(longitudes), max(longitudes), len(longitudes)))
    4220         log.critical('    t in [%f, %f], len(t) == %d'
    4221                      % (min(times), max(times), len(times)))
    4222 
    4223     ######### WRITE THE SWW FILE #############
    4224 
    4225     # NetCDF file definition
    4226     outfile = NetCDFFile(sww_file, netcdf_mode_w)
    4227 
    4228     #Create new file
    4229     outfile.institution = 'Geoscience Australia'
    4230     outfile.description = 'Converted from XXX'
    4231 
    4232     #For sww compatibility
    4233     outfile.smoothing = 'Yes'
    4234     outfile.order = 1
    4235 
    4236     #Start time in seconds since the epoch (midnight 1/1/1970)
    4237     outfile.starttime = starttime = times[0]
    4238 
    4239     # dimension definitions
    4240     outfile.createDimension('number_of_volumes', number_of_volumes)
    4241     outfile.createDimension('number_of_vertices', 3)
    4242     outfile.createDimension('number_of_points', number_of_points)
    4243     outfile.createDimension('number_of_timesteps', number_of_times)
    4244 
    4245     # variable definitions
    4246     outfile.createVariable('x', precision, ('number_of_points',))
    4247     outfile.createVariable('y', precision, ('number_of_points',))
    4248     outfile.createVariable('elevation', precision, ('number_of_points',))
    4249 
    4250     #FIXME: Backwards compatibility
    4251     #outfile.createVariable('z', precision, ('number_of_points',))
    4252     #################################
    4253 
    4254     outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',
    4255                                                    'number_of_vertices'))
    4256 
    4257     outfile.createVariable('time', precision, ('number_of_timesteps',))
    4258 
    4259     outfile.createVariable('stage', precision, ('number_of_timesteps',
    4260                                                 'number_of_points'))
    4261 
    4262     outfile.createVariable('xmomentum', precision, ('number_of_timesteps',
    4263                                                     'number_of_points'))
    4264 
    4265     outfile.createVariable('ymomentum', precision, ('number_of_timesteps',
    4266                                                     'number_of_points'))
    4267 
    4268     #Store
    4269     from anuga.coordinate_transforms.redfearn import redfearn
    4270 
    4271     x = num.zeros(number_of_points, num.float)  #Easting
    4272     y = num.zeros(number_of_points, num.float)  #Northing
    4273 
    4274     if verbose: log.critical('Making triangular grid')
    4275 
    4276     #Get zone of 1st point.
    4277     refzone, _, _ = redfearn(latitudes[0], longitudes[0])
    4278 
    4279     vertices = {}
    4280     i = 0
    4281     for k, lat in enumerate(latitudes):
    4282         for l, lon in enumerate(longitudes):
    4283             vertices[l,k] = i
    4284 
    4285             zone, easting, northing = redfearn(lat, lon)
    4286 
    4287             #msg = 'Zone boundary crossed at longitude =', lon
    4288             #assert zone == refzone, msg
    4289             #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
    4290             x[i] = easting
    4291             y[i] = northing
    4292             i += 1
    4293 
    4294     #Construct 2 triangles per 'rectangular' element
    4295     volumes = []
    4296     for l in range(number_of_longitudes-1):    #X direction
    4297         for k in range(number_of_latitudes-1): #Y direction
    4298             v1 = vertices[l,k+1]
    4299             v2 = vertices[l,k]
    4300             v3 = vertices[l+1,k+1]
    4301             v4 = vertices[l+1,k]
    4302 
    4303             #Note, this is different to the ferret2sww code
    4304             #since the order of the lats is reversed.
    4305             volumes.append([v1,v3,v2]) #Upper element
    4306             volumes.append([v4,v2,v3]) #Lower element
    4307 
    4308     volumes = num.array(volumes, num.int)      #array default#
    4309 
    4310     geo_ref = Geo_reference(refzone, min(x), min(y))
    4311     geo_ref.write_NetCDF(outfile)
    4312 
    4313     # This will put the geo ref in the middle
    4314     #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2., (max(x)+min(y))/2.)
    4315 
    4316     if verbose:
    4317         log.critical('------------------------------------------------')
    4318         log.critical('More Statistics:')
    4319         log.critical('  Extent (x/y):')
    4320         log.critical('    x in [%f, %f], len(x) == %d'
    4321                      % (min(x), max(x), len(x)))
    4322         log.critical('    y in [%f, %f], len(y) == %d'
    4323                      % (min(y), max(y), len(y)))
    4324         log.critical('geo_ref: %s' % str(geo_ref))
    4325 
    4326     z = num.resize(bath_grid,outfile.variables['elevation'][:].shape)
    4327     outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    4328     outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    4329     # FIXME (Ole): Remove once viewer has been recompiled and changed
    4330     #              to use elevation instead of z
    4331     #outfile.variables['z'][:] = z
    4332     outfile.variables['elevation'][:] = z
    4333     outfile.variables['volumes'][:] = volumes.astype(num.int32) # On Opteron 64
    4334 
    4335     stage = outfile.variables['stage']
    4336     xmomentum = outfile.variables['xmomentum']
    4337     ymomentum = outfile.variables['ymomentum']
    4338 
    4339     outfile.variables['time'][:] = times   #Store time relative
    4340 
    4341     if verbose: log.critical('Converting quantities')
    4342 
    4343     n = number_of_times
    4344     for j in range(number_of_times):
    4345         # load in files
    4346         elevation_meta, elevation_grid = \
    4347             _read_asc(elevation_dir + os.sep + elevation_files[j])
    4348 
    4349         _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
    4350         _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])
    4351 
    4352         #cut matrix to desired size
    4353         elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
    4354         u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
    4355         v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
    4356 
    4357         # handle missing values
    4358         missing = (elevation_grid == elevation_meta['NODATA_value'])
    4359         if num.sometrue(missing):
    4360             if fail_on_NaN:
    4361                 msg = 'File %s contains missing values' \
    4362                       % (elevation_files[j])
    4363                 raise DataMissingValuesError, msg
    4364             else:
    4365                 elevation_grid = elevation_grid*(missing==0) \
    4366                                  + missing*elevation_NaN_filler
    4367 
    4368         if verbose and j % ((n+10)/10) == 0: log.critical('  Doing %d of %d'
    4369                                                           % (j, n))
    4370 
    4371         i = 0
    4372         for k in range(number_of_latitudes):      #Y direction
    4373             for l in range(number_of_longitudes): #X direction
    4374                 w = zscale*elevation_grid[k,l] + mean_stage
    4375                 stage[j,i] = w
    4376                 h = w - z[i]
    4377                 xmomentum[j,i] = u_momentum_grid[k,l]*h
    4378                 ymomentum[j,i] = v_momentum_grid[k,l]*h
    4379                 i += 1
    4380 
    4381     outfile.close()
    4382 
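
A usage sketch for asc_csiro2sww (all directory names, the output file name
and the lat/lon extent below are hypothetical):

    from anuga.shallow_water.file_conversion import asc_csiro2sww

    # Each directory holds one type of ESRI ASCII data, as described in the
    # docstring; the result is a single sww boundary file.
    asc_csiro2sww('bath_dir', 'elevation_dir', 'ucur_dir', 'vcur_dir',
                  'boundary.sww',
                  minlat=-21.0, maxlat=-20.0,
                  minlon=114.0, maxlon=115.0,
                  mean_stage=0.0,
                  verbose=True)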
    4383 
    4384 ##
    43852762# @brief Return max&min indexes (for slicing) of area specified.
    43862763# @param latitudes_ref ??
     
    59514328
    59524329
    5953 # @brief A class to write an SWW file.
    5954 class Write_sww:
    5955     from anuga.shallow_water.shallow_water_domain import Domain
    5956 
    5957     RANGE = '_range'
    5958     EXTREMA = ':extrema'
    5959 
    5960     ##
    5961     # @brief Instantiate the SWW writer class.
    5962     def __init__(self, static_quantities, dynamic_quantities):
    5963         """Initialise Write_sww with two lists of quantity names:
    5964        
    5965         static_quantities (e.g. elevation or friction):
    5966             Stored once at the beginning of the simulation in a 1D array
    5967             of length number_of_points
    5968         dynamic_quantities (e.g. stage):
    5969             Stored every timestep in a 2D array with
    5970             dimensions number_of_timesteps x number_of_points
    5971        
    5972         """
    5973         self.static_quantities = static_quantities   
    5974         self.dynamic_quantities = dynamic_quantities
    5975 
    5976 
    5977     ##
    5978     # @brief Store a header in the SWW file.
    5979     # @param outfile Open handle to the file that will be written.
    5980     # @param times A list of time slices *or* a start time.
    5981     # @param number_of_volumes The number of triangles.
    5982     # @param number_of_points The number of points.
    5983     # @param description The internal file description string.
    5984     # @param smoothing True if smoothing is to be used.
    5985     # @param order
    5986     # @param sww_precision Data type of the quantity written (netcdf constant)
    5987     # @param verbose True if this function is to be verbose.
    5988     # @note If 'times' is a list, the info will be made relative.
    5989     def store_header(self,
    5990                      outfile,
    5991                      times,
    5992                      number_of_volumes,
    5993                      number_of_points,
    5994                      description='Generated by ANUGA',
    5995                      smoothing=True,
    5996                      order=1,
    5997                      sww_precision=netcdf_float32,
    5998                      verbose=False):
    5999         """Write an SWW file header.
    6000 
    6001         outfile - the open file that will be written
    6002         times - A list of the time slice times OR a start time
    6003         Note, if a list is given the info will be made relative.
    6004         number_of_volumes - the number of triangles
    6005         """
    6006 
    6007         outfile.institution = 'Geoscience Australia'
    6008         outfile.description = description
    6009 
    6010         # For sww compatibility
    6011         if smoothing is True:
    6012             # Smoothing to be deprecated
    6013             outfile.smoothing = 'Yes'
    6014             outfile.vertices_are_stored_uniquely = 'False'
    6015         else:
    6016             # Smoothing to be deprecated
    6017             outfile.smoothing = 'No'
    6018             outfile.vertices_are_stored_uniquely = 'True'
    6019         outfile.order = order
    6020 
    6021         try:
    6022             revision_number = get_revision_number()
    6023         except Exception:
    6024             revision_number = None
    6025         # Allow None to be stored as a string
    6026         outfile.revision_number = str(revision_number)
    6027 
    6028         # This is used to separate a single number from a list:
    6029         # it distinguishes lists and numeric arrays from a scalar start time.
    6030         if isinstance(times, (list, num.ndarray)):
    6031             number_of_times = len(times)
    6032             times = ensure_numeric(times)
    6033             if number_of_times == 0:
    6034                 starttime = 0
    6035             else:
    6036                 starttime = times[0]
    6037                 times = times - starttime  #Store relative times
    6038         else:
    6039             number_of_times = 0
    6040             starttime = times
    6041 
    6042 
    6043         outfile.starttime = starttime
    6044 
    6045         # dimension definitions
    6046         outfile.createDimension('number_of_volumes', number_of_volumes)
    6047         outfile.createDimension('number_of_vertices', 3)
    6048         outfile.createDimension('numbers_in_range', 2)
    6049 
    6050         if smoothing is True:
    6051             outfile.createDimension('number_of_points', number_of_points)
    6052             # FIXME(Ole): This will cause sww files for parallel domains to
    6053             # have ghost nodes stored (but not used by triangles).
    6054             # To clean this up, we have to change get_vertex_values and
    6055             # friends in quantity.py (but I can't be bothered right now)
    6056         else:
    6057             outfile.createDimension('number_of_points', 3*number_of_volumes)
    6058 
    6059         outfile.createDimension('number_of_timesteps', number_of_times)
    6060 
    6061         # variable definitions
    6062         outfile.createVariable('x', sww_precision, ('number_of_points',))
    6063         outfile.createVariable('y', sww_precision, ('number_of_points',))
    6064 
    6065         outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',
    6066                                                        'number_of_vertices'))
    6067 
    6068         # Doing sww_precision instead of Float gives cast errors.
    6069         outfile.createVariable('time', netcdf_float,
    6070                                ('number_of_timesteps',))
    6071 
    6072                                
    6073         for q in self.static_quantities:
    6074            
    6075             outfile.createVariable(q, sww_precision,
    6076                                    ('number_of_points',))
    6077            
    6078             outfile.createVariable(q + Write_sww.RANGE, sww_precision,
    6079                                    ('numbers_in_range',))
    6080                                    
    6081             # Initialise ranges with small and large sentinels.
    6082             # If this was in pure Python we could have used None sensibly
    6083             outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
    6084             outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
    6085 
    6086         #if 'elevation' in self.static_quantities:   
    6087         #    # FIXME: Backwards compat - get rid of z once old view has retired
    6088         #    outfile.createVariable('z', sww_precision,
    6089         #                           ('number_of_points',))
    6090                                
    6091         for q in self.dynamic_quantities:
    6092             outfile.createVariable(q, sww_precision, ('number_of_timesteps',
    6093                                                       'number_of_points'))
    6094             outfile.createVariable(q + Write_sww.RANGE, sww_precision,
    6095                                    ('numbers_in_range',))
    6096 
    6097             # Initialise ranges with small and large sentinels.
    6098             # If this was in pure Python we could have used None sensibly
    6099             outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
    6100             outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
    6101 
    6102         if isinstance(times, (list, num.ndarray)):
    6103             outfile.variables['time'][:] = times    # Store time relative
    6104 
    6105         if verbose:
    6106             log.critical('------------------------------------------------')
    6107             log.critical('Statistics:')
    6108             log.critical('    t in [%f, %f], len(t) == %d'
    6109                          % (num.min(times), num.max(times), len(times.flat)))
    6110 
    6111     ##
    6112     # @brief Store triangulation data in the underlying file.
    6113     # @param outfile Open handle to underlying file.
    6114     # @param points_utm List or array of points in UTM.
    6115     # @param volumes
    6116     # @param zone
    6117     # @param new_origin georeference that the points can be set to.
    6118     # @param points_georeference The georeference of the points_utm.
    6119     # @param verbose True if this function is to be verbose.
    6120     def store_triangulation(self,
    6121                             outfile,
    6122                             points_utm,
    6123                             volumes,
    6124                             zone=None,
    6125                             new_origin=None,
    6126                             points_georeference=None,
    6127                             verbose=False):
    6128         """
    6129         new_origin - a georeference that the points can be set to. (Maybe
    6130         do this before calling this function.)
    6131 
    6132         points_utm - currently a list or array of the points in UTM.
    6133         points_georeference - the georeference of the points_utm.
    6134 
    6135         If both new_origin and points_georeference are given, the points
    6136         are converted from the old georeference to the new one.
    6137 
    6138         If only new_origin is given, the points are absolute and are
    6139         converted to relative.
    6140 
    6141         If only points_georeference is given, the points are relative and
    6142         are stored as relative.
    6143 
    6144         If neither georef is given, a new georef is created based on the
    6145         minimums of points_utm. (Another option would be to default to
    6146         absolute.)
    6147 
    6148         If you don't supply either geo_ref, then supply a zone; if not,
    6149         the default zone will be used.
    6150 
    6151         Precondition:
    6152             store_header has been called.
    6155         """
    6156 
    6157         number_of_points = len(points_utm)
    6158         volumes = num.array(volumes)
    6159         points_utm = num.array(points_utm)
    6160 
    6161         # Given the two geo_refs and the points, do the stuff
    6162         # described in the method header
    6163         # if this is needed else where, pull out as a function
    6164         points_georeference = ensure_geo_reference(points_georeference)
    6165         new_origin = ensure_geo_reference(new_origin)
    6166         if new_origin is None and points_georeference is not None:
    6167             points = points_utm
    6168             geo_ref = points_georeference
    6169         else:
    6170             if new_origin is None:
    6171                 new_origin = Geo_reference(zone, min(points_utm[:,0]),
    6172                                                  min(points_utm[:,1]))
    6173             points = new_origin.change_points_geo_ref(points_utm,
    6174                                                       points_georeference)
    6175             geo_ref = new_origin
    6176 
    6177         # At this stage I need a georef and points
    6178         # the points are relative to the georef
    6179         geo_ref.write_NetCDF(outfile)
    6180 
    6181         # This will put the geo ref in the middle
    6182         #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
    6183 
    6184         x = points[:,0]
    6185         y = points[:,1]
    6186 
    6187         if verbose:
    6188             log.critical('------------------------------------------------')
    6189             log.critical('More Statistics:')
    6190             log.critical('  Extent (x/y):')
    6191             log.critical('    x in [%f, %f], len(x) == %d'
    6192                          % (min(x), max(x), len(x)))
    6193             log.critical('    y in [%f, %f], len(y) == %d'
    6194                          % (min(y), max(y), len(y)))
    6195             #log.critical('    z in [%f, %f], len(z) == %d'
    6196             #             % (min(elevation), max(elevation), len(elevation)))
    6197             log.critical('geo_ref: %s' % str(geo_ref))
    6198             log.critical('------------------------------------------------')
    6199 
    6200         outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
    6201         outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
    6202         outfile.variables['volumes'][:] = volumes.astype(num.int32) #On Opteron 64
    6203 
    6204 
    6205 
    6206     # @brief Write the static quantity data to the underlying file.
    6207     # @param outfile Handle to open underlying file.
    6208     # @param sww_precision Format of quantity data to write (default Float32).
    6209     # @param verbose True if this function is to be verbose.
    6210     # @param **quant
    6211     def store_static_quantities(self,
    6212                                 outfile,
    6213                                 sww_precision=num.float32,
    6214                                 verbose=False,
    6215                                 **quant):
    6216         """
    6217         Write the static quantity info.
    6218 
    6219         **quant is extra keyword arguments passed in. These must be
    6220           the numpy arrays to be stored once in the sww file.
    6221 
    6222         The argument sww_precision allows for storing as either
    6223         * single precision (default): num.float32
    6224         * double precision: num.float64 or num.float
    6225 
    6226         Precondition:
    6227             store_triangulation and
    6228             store_header have been called.
    6229         """
    6230 
    6231         # The dictionary quant must contain numpy arrays for each name.
    6232         # These will typically be quantities from Domain such as friction
    6233         #
    6234         # Arrays not listed in static_quantities will be ignored, silently.
    6235         #
    6236         # This method will also write the ranges for each quantity,
    6237         # e.g. elevation_range and friction_range
    6238         for q in self.static_quantities:
    6239             if not quant.has_key(q):
    6240                 msg = 'Values for quantity %s were not specified in ' % q
    6241                 msg += 'store_static_quantities so they cannot be stored.'
    6242                 raise NewQuantity, msg
    6243             else:
    6244                 q_values = ensure_numeric(quant[q])
    6245                
    6246                 x = q_values.astype(sww_precision)
    6247                 outfile.variables[q][:] = x
    6248        
    6249                 # This populates the _range values
    6250                 outfile.variables[q + Write_sww.RANGE][0] = num.min(x)
    6251                 outfile.variables[q + Write_sww.RANGE][1] = num.max(x)
    6252                    
    6253         # FIXME: Hack for backwards compatibility with old viewer
    6254         #if 'elevation' in self.static_quantities:
    6255         #    outfile.variables['z'][:] = outfile.variables['elevation'][:]
    6256 
    6257                    
    6258                    
    6259        
    6260        
    6261     ##
    6262     # @brief Write the quantity data to the underlying file.
    6263     # @param outfile Handle to open underlying file.
    6264     # @param sww_precision Format of quantity data to write (default Float32).
    6265     # @param slice_index
    6266     # @param time
    6267     # @param verbose True if this function is to be verbose.
    6268     # @param **quant
    6269     def store_quantities(self,
    6270                          outfile,
    6271                          sww_precision=num.float32,
    6272                          slice_index=None,
    6273                          time=None,
    6274                          verbose=False,
    6275                          **quant):
    6276         """
    6277         Write the quantity info at each timestep.
    6278 
    6279         **quant is extra keyword arguments passed in. These must be
    6280           the numpy arrays to be stored in the sww file at each timestep.
    6281 
    6282         If the time array has already been built, use slice_index
    6283         to specify the index.
    6284 
    6285         Otherwise, use time to extend the time dimension.
    6286 
    6287         Maybe make this general, but the viewer assumes these quantities,
    6288         so maybe we don't want it general - unless the viewer is general
    6289        
    6290         The argument sww_precision allows for storing as either
    6291         * single precision (default): num.float32
    6292         * double precision: num.float64 or num.float
    6293 
    6294         Precondition:
    6295             store_triangulation and
    6296             store_header have been called.
    6297         """
    6298 
    6299         if time is not None:
    6300             file_time = outfile.variables['time']
    6301             slice_index = len(file_time)
    6302             file_time[slice_index] = time
    6303         else:
    6304             slice_index = int(slice_index) # Has to be cast in case it was numpy.int   
    6305 
    6306         # Write the named dynamic quantities
    6307         # The dictionary quant must contain numpy arrays for each name.
    6308         # These will typically be the conserved quantities from Domain
    6309         # (Typically stage,  xmomentum, ymomentum).
    6310         #
    6311         # Arrays not listed in dynamic_quantities will be ignored, silently.
    6312         #
    6313         # This method will also write the ranges for each quantity,
    6314         # e.g. stage_range, xmomentum_range and ymomentum_range
    6315         for q in self.dynamic_quantities:
    6316             if not quant.has_key(q):
    6317                 msg = 'Values for quantity %s were not specified in ' % q
    6318                 msg += 'store_quantities so they cannot be stored.'
    6319                 raise NewQuantity, msg
    6320             else:
    6321                 q_values = ensure_numeric(quant[q])
    6322                
    6323                 x = q_values.astype(sww_precision)
    6324                 outfile.variables[q][slice_index] = x
    6325                    
    6326        
    6327                 # This updates the _range values
    6328                 q_range = outfile.variables[q + Write_sww.RANGE][:]
    6329                 q_values_min = num.min(q_values)
    6330                 if q_values_min < q_range[0]:
    6331                     outfile.variables[q + Write_sww.RANGE][0] = q_values_min
    6332                 q_values_max = num.max(q_values)
    6333                 if q_values_max > q_range[1]:
    6334                     outfile.variables[q + Write_sww.RANGE][1] = q_values_max
    6335 
    6336     ##
    6337     # @brief Print the quantities in the underlying file.
    6338     # @param outfile Open handle to the file whose quantity ranges are printed.
    6339     def verbose_quantities(self, outfile):
    6340         log.critical('------------------------------------------------')
    6341         log.critical('More Statistics:')
    6342         for q in self.dynamic_quantities:
    6343             log.critical('  %s in [%f, %f]'
    6344                          % (q, outfile.variables[q+Write_sww.RANGE][0],
    6345                             outfile.variables[q+Write_sww.RANGE][1]))
    6346         log.critical('------------------------------------------------')
    6347 
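
A minimal sketch of the Write_sww call sequence (the file name, mesh and
values are illustrative; in practice SWW_file drives this class for a Domain):

    import numpy as num
    from Scientific.IO.NetCDF import NetCDFFile
    from anuga.config import netcdf_mode_w

    writer = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
    outfile = NetCDFFile('example.sww', netcdf_mode_w)

    points = [[0.0, 0.0], [10.0, 0.0], [0.0, 10.0]]   # one triangle, in UTM
    volumes = [[0, 1, 2]]

    # Precondition chain: header first, then triangulation, then quantities
    writer.store_header(outfile, times=0.0,
                        number_of_volumes=1, number_of_points=3)
    writer.store_triangulation(outfile, points, volumes, zone=56)
    writer.store_static_quantities(outfile,
                                   elevation=num.zeros(3, num.float))
    writer.store_quantities(outfile, time=0.0,
                            stage=num.ones(3, num.float),
                            xmomentum=num.zeros(3, num.float),
                            ymomentum=num.zeros(3, num.float))
    outfile.close()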
    6348 
    63494330##
    63504331# @brief Obsolete?
     
    76525633
    76535634##
    7654 # @brief Find all SWW files in a directory with given stem name.
    7655 # @param look_in_dir The directory to look in.
    7656 # @param base_name The file stem name.
    7657 # @param verbose True if this function is to be verbose.
    7658 # @return A list of found filename strings.
    7659 # @note Will accept 'base_name' with or without '.sww' extension.
    7660 # @note If no files found, raises IOError exception.
    7661 def get_all_swwfiles(look_in_dir='', base_name='', verbose=False):
    7662     '''
    7663     Finds all the sww files in "look_in_dir" whose names contain "base_name".
    7664     Will accept base_name with or without the extension ".sww".
    7665 
    7666     Returns: a list of strings
    7667 
    7668     Usage:     iterate_over = get_all_swwfiles(dir, name)
    7669     then
    7670                for swwfile in iterate_over:
    7671                    do stuff
    7672 
    7673     Check "export_grids" and "get_maximum_inundation_data" for examples
    7674     '''
    7675 
    7676     # plus tests the extension
    7677     name, extension = os.path.splitext(base_name)
    7678 
    7679     if extension != '' and extension != '.sww':
    7680         msg = 'file %s%s must be a NetCDF sww file!' % (base_name, extension)
    7681         raise IOError, msg
    7682 
    7683     if look_in_dir == "":
    7684         look_in_dir = "."                                   # Unix compatibility
    7685 
    7686     dir_ls = os.listdir(look_in_dir)
    7687     iterate_over = [x[:-4] for x in dir_ls if name in x and x[-4:] == '.sww']
    7688     if len(iterate_over) == 0:
    7689         msg = 'No files of the base name %s' % name
    7690         raise IOError, msg
    7691 
    7692     if verbose: log.critical('iterate over %s' % iterate_over)
    7693 
    7694     return iterate_over
    7695 
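
A usage sketch (the directory name and file stem below are hypothetical):

    # Given files such as 'output/run1_P0.sww' and 'output/run1_P1.sww',
    # this returns the stem names without the '.sww' extension.
    for swwfile in get_all_swwfiles(look_in_dir='output', base_name='run1'):
        print swwfile

get_all_files_with_extension, defined below, generalises the same search to
an arbitrary extension.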
    7696 
    7697 ##
    7698 # @brief Find all files in a directory that contain a string and have extension.
    7699 # @param look_in_dir Path to the directory to look in.
    7700 # @param base_name Stem filename of the file(s) of interest.
    7701 # @param extension Extension of the files to look for.
    7702 # @param verbose True if this function is to be verbose.
    7703 # @return A list of found filename strings.
    7704 # @note If no files found, raises IOError exception.
    7705 def get_all_files_with_extension(look_in_dir='',
    7706                                  base_name='',
    7707                                  extension='.sww',
    7708                                  verbose=False):
    7709     '''Find all files in a directory with given stem name and extension.
    7710     Finds all files in "look_in_dir" whose names contain "base_name"
    7711     and end with "extension".
    7712 
    7713     Returns: a list of strings
    7714     Usage:     iterate_over = get_all_files_with_extension(dir, name, ext)
    7715     then
    7716                for swwfile in iterate_over:
    7717                    do stuff
    7718 
    7719     Check "export_grids" and "get_maximum_inundation_data" for examples
    7720     '''
    7721 
    7722     # plus tests the extension
    7723     name, ext = os.path.splitext(base_name)
    7724 
    7725     if ext != '' and ext != extension:
    7726         msg = 'base_name %s must be a file with %s extension!' \
    7727               % (base_name, extension)
    7728         raise IOError, msg
    7729 
    7730     if look_in_dir == "":
    7731         look_in_dir = "."                               # Unix compatibility
    7732 
    7733     dir_ls = os.listdir(look_in_dir)
    7734     iterate_over = [x[:-len(extension)] for x in dir_ls if name in x and x.endswith(extension)]
    7735 
    7736     if len(iterate_over) == 0:
    7737         msg = 'No files of the base name %s in %s' % (name, look_in_dir)
    7738         raise IOError, msg
    7739 
    7740     if verbose: log.critical('iterate over %s' % iterate_over)
    7741 
    7742     return iterate_over
    7743 
    7744 
    7745 ##
    7746 # @brief Find all directories whose names contain a given string.
    7747 # @param look_in_dir Path to the directory to look in.
    7748 # @param base_name String that directory names must contain.
    7749 # @param verbose True if this function is to be verbose.
    7750 def get_all_directories_with_name(look_in_dir='', base_name='', verbose=False):
    7751     '''
    7752     Finds all the directories in "look_in_dir" whose names contain "base_name".
    7753 
    7754     Returns: a list of strings
    7755 
    7756     Usage:     iterate_over = get_all_directories_with_name(dir, name)
    7757     then:      for swwfile in iterate_over:
    7758                    do stuff
    7759 
    7760     Check "export_grids" and "get_maximum_inundation_data" for examples
    7761     '''
    7762 
    7763     if look_in_dir == "":
    7764         look_in_dir = "."                                  # Unix compatibility
    7765 
    7766     dir_ls = os.listdir(look_in_dir)
    7767     iterate_over = [x for x in dir_ls if base_name in x]
    7768 
    7769     if len(iterate_over) == 0:
    7770         msg = 'No directories with the base name %s' % base_name
    7771         raise IOError, msg
    7772 
    7773     if verbose: log.critical('iterate over %s' % iterate_over)
    7774 
    7775     return iterate_over
    7776 
    7777 
    7778 ##
    77795635# @brief Convert points to a polygon (?)
    77805636# @param points_file The points file.
  • anuga_core/source/anuga/shallow_water/shallow_water_domain.py

    r7733 r7735  
    664664        """
    665665
    666         from anuga.shallow_water.data_manager import SWW_file
     666        from anuga.shallow_water.sww_file import SWW_file
    667667       
    668668        # Initialise writer
  • anuga_core/source/anuga/shallow_water/test_data_manager.py

    r7618 r7735  
    1818from anuga.shallow_water import *
    1919from anuga.shallow_water.data_manager import *
     20from anuga.shallow_water.sww_file import SWW_file
     21from anuga.shallow_water.file_conversion import tsh2sww, \
     22                        asc_csiro2sww, pmesh_to_domain_instance, \
     23                        dem2pts
    2024from anuga.config import epsilon, g
    2125from anuga.utilities.anuga_exceptions import ANUGAError
     
    2428from anuga.abstract_2d_finite_volumes.util import file_function
    2529from anuga.utilities.system_tools import get_pathname_from_package
     30from anuga.utilities.file_utils import del_dir, load_csv_as_dict
    2631from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
    2732from anuga.config import netcdf_float
     
    44444449
    44454450        import time, os
    4446         from Scientific.IO.NetCDF import NetCDFFile
    44474451
    44484452        # the memory optimised least squares
     
    44694473
    44704474        import time, os
    4471         from Scientific.IO.NetCDF import NetCDFFile
    44724475
    44734476        from data_manager import _read_asc
     
    77107713            x = fid.variables['x'][:]+fid.xllcorner    # x-coordinates of vertices
    77117714            y = fid.variables['y'][:]+fid.yllcorner    # y-coordinates of vertices
    7712             elevation = fid.variables['elevation'][:]
     7715            elevation = fid.variables['elevation'][:]
    77137716            time=fid.variables['time'][:]+fid.starttime
    77147717
     
    77367739                count = 0
    77377740
    7738                 # read in urs test data for stage, e and n velocity
     7741                # read in urs test data for stage, e and n velocity
    77397742                urs_file_name_z = 'z_'+str(source_number)+'_'+str(j)+'.csv'
    7740                 dict = csv2array(os.path.join(testdir, urs_file_name_z))
     7743                dict = load_csv_as_array(os.path.join(testdir, urs_file_name_z))
    77417744                urs_stage = dict['urs_stage']
    77427745                urs_file_name_e = 'e_'+str(source_number)+'_'+str(j)+'.csv'
    7743                 dict = csv2array(os.path.join(testdir, urs_file_name_e))
     7746                dict = load_csv_as_array(os.path.join(testdir, urs_file_name_e))
    77447747                urs_e = dict['urs_e']
    77457748                urs_file_name_n = 'n_'+str(source_number)+'_'+str(j)+'.csv'
    7746                 dict = csv2array(os.path.join(testdir, urs_file_name_n))
     7749                dict = load_csv_as_array(os.path.join(testdir, urs_file_name_n))
    77477750                urs_n = dict['urs_n']
    77487751
    7749                 # find start and end time for stage             
     7752                # find start and end time for stage             
    77507753                for i in range(len(urs_stage)):
    77517754                    if urs_stage[i] == 0.0:
     
    77747777                assert num.allclose(index_start_urs_z,start_times_z[j]/delta_t), msg
    77757778
    7776                 msg = 'e velocity start time from urs file is not the same as the '
     7779                msg = 'e velocity start time from urs file is not the same as the '
    77777780                msg += 'header file for source %i and station %i' %(source_number,j)
    7778                 assert num.allclose(index_start_urs_e,start_times_e[j]/delta_t), msg
    7779 
    7780                 msg = 'n velocity start time from urs file is not the same as the '
     7781                assert num.allclose(index_start_urs_e,start_times_e[j]/delta_t), msg
     7782
     7783                msg = 'n velocity start time from urs file is not the same as the '
    77817784                msg += 'header file for source %i and station %i' %(source_number,j)
    7782                 assert num.allclose(index_start_urs_n,start_times_n[j]/delta_t), msg
    7783                
    7784                 # get index for start and end time for sts quantities
     7785                assert num.allclose(index_start_urs_n,start_times_n[j]/delta_t), msg
     7786               
     7787                # get index for start and end time for sts quantities
    77857788                index_start_stage = 0
    77867789                index_end_stage = 0
     
    78107813                                    sts_stage[index_start_stage:index_end_stage],
    78117814                                    rtol=1.0e-6, atol=1.0e-5 ), msg                               
    7812                                
     7815                               
    78137816                # check that urs e velocity and sts xmomentum are the same
    7814                 msg = 'urs e velocity is not equivalent to sts x momentum for source %i and station %i' %(source_number,j)
     7817                msg = 'urs e velocity is not equivalent to sts x momentum for source %i and station %i' %(source_number,j)
    78157818                assert num.allclose(urs_e[index_start_urs_e:index_end_urs_e]*(urs_stage[index_start_urs_e:index_end_urs_e]-elevation[j]),
    78167819                                sts_xmom[index_start_x:index_end_x],
    78177820                                rtol=1.0e-5, atol=1.0e-4 ), msg
    7818                
     7821               
    78197822                # check that urs n velocity and sts ymomentum are the same
    78207823                #print 'urs n velocity', urs_n[index_start_urs_n:index_end_urs_n]*(urs_stage[index_start_urs_n:index_end_urs_n]-elevation[j])
     
    78647867        x = fid.variables['x'][:]+fid.xllcorner   # x-coordinates of vertices
    78657868        y = fid.variables['y'][:]+fid.yllcorner   # y-coordinates of vertices
    7866         elevation = fid.variables['elevation'][:]
     7869        elevation = fid.variables['elevation'][:]
    78677870        time=fid.variables['time'][:]+fid.starttime
    78687871       
     
    78927895        assert num.allclose(sts_starttime, starttime), msg
    78937896   
    7894         #stations = [1,2,3]
     7897        #stations = [1,2,3]
    78957898        #for j in stations:
    78967899        for j in range(len(x)):
    78977900            index_start_urs_z = 0
    78987901            index_end_urs_z = 0
    7899             index_start_urs_e = 0
     7902            index_start_urs_e = 0
    79007903            index_end_urs_e = 0
    7901             index_start_urs_n = 0
     7904            index_start_urs_n = 0
    79027905            index_end_urs_n = 0
    79037906            count = 0
    79047907
    7905             # read in urs test data for stage, e and n velocity
     7908            # read in urs test data for stage, e and n velocity
    79067909            urs_file_name_z = 'z_combined_'+str(j)+'.csv'
    7907             dict = csv2array(os.path.join(testdir, urs_file_name_z))
     7910            dict = load_csv_as_array(os.path.join(testdir, urs_file_name_z))
    79087911            urs_stage = dict['urs_stage']
    79097912            urs_file_name_e = 'e_combined_'+str(j)+'.csv'
    7910             dict = csv2array(os.path.join(testdir, urs_file_name_e))
     7913            dict = load_csv_as_array(os.path.join(testdir, urs_file_name_e))
    79117914            urs_e = dict['urs_e']
    79127915            urs_file_name_n = 'n_combined_'+str(j)+'.csv'
    7913             dict = csv2array(os.path.join(testdir, urs_file_name_n))
     7916            dict = load_csv_as_array(os.path.join(testdir, urs_file_name_n))
    79147917            urs_n = dict['urs_n']
    79157918
    7916             # find start and end time for stage         
     7919            # find start and end time for stage         
    79177920            for i in range(len(urs_stage)):
    79187921                if urs_stage[i] == 0.0:
     
    79377940            assert num.allclose(index_start_urs_z,start_times_z/delta_t), msg
    79387941
    7939             msg = 'e velocity start time from urs file is not the same as the '
     7942            msg = 'e velocity start time from urs file is not the same as the '
    79407943            msg += 'header file at station %i' %(j)
    7941             assert num.allclose(index_start_urs_e,start_times_e/delta_t), msg
    7942 
    7943             msg = 'n velocity start time from urs file is not the same as the '
     7944            assert num.allclose(index_start_urs_e,start_times_e/delta_t), msg
     7945
     7946            msg = 'n velocity start time from urs file is not the same as the '
    79447947            msg += 'header file at station %i' %(j)
    7945             assert num.allclose(index_start_urs_n,start_times_n/delta_t), msg
    7946                
    7947             # get index for start and end time for sts quantities
     7948            assert num.allclose(index_start_urs_n,start_times_n/delta_t), msg
     7949               
     7950            # get index for start and end time for sts quantities
    79487951            index_start_stage = 0
    79497952            index_end_stage = 0
     
    79637966                    index_end_stage = i
    79647967               
    7965             index_end_stage = index_start_stage + len(urs_stage[index_start_urs_z:index_end_urs_z])
    7966 
    7967             index_start_x = index_start_stage
    7968             index_end_x = index_start_x + len(urs_stage[index_start_urs_e:index_end_urs_e])
    7969             sts_xmom = quantities['ymomentum'][:,j]
    7970 
    7971             index_start_y = index_start_stage
    7972             index_end_y = index_start_y + len(urs_stage[index_start_urs_n:index_end_urs_n])
    7973             sts_ymom = quantities['ymomentum'][:,j]
     7968            index_end_stage = index_start_stage + len(urs_stage[index_start_urs_z:index_end_urs_z])
     7969
     7970            index_start_x = index_start_stage
     7971            index_end_x = index_start_x + len(urs_stage[index_start_urs_e:index_end_urs_e])
     7972            sts_xmom = quantities['ymomentum'][:,j]
     7973
     7974            index_start_y = index_start_stage
     7975            index_end_y = index_start_y + len(urs_stage[index_start_urs_n:index_end_urs_n])
     7976            sts_ymom = quantities['ymomentum'][:,j]
    79747977
    79757978            # check that urs stage and sts stage are the same
    79767979            msg = 'urs stage is not equal to sts stage for station %i' %j
    7977             #print 'urs stage', urs_stage[index_start_urs_z:index_end_urs_z]
     7980            #print 'urs stage', urs_stage[index_start_urs_z:index_end_urs_z]
    79787981            #print 'sts stage', sts_stage[index_start_stage:index_end_stage]
    79797982            #print 'diff', max(urs_stage[index_start_urs_z:index_end_urs_z]-sts_stage[index_start_stage:index_end_stage])
     
    79827985                            sts_stage[index_start_stage:index_end_stage],
    79837986                                rtol=1.0e-5, atol=1.0e-4 ), msg                               
    7984                                
     7987                               
    79857988            # check that urs e velocity and sts xmomentum are the same         
    79867989            msg = 'urs e velocity is not equivalent to sts xmomentum for station %i' %j
     
    79887991                            sts_xmom[index_start_x:index_end_x],
    79897992                            rtol=1.0e-5, atol=1.0e-4 ), msg
    7990                
     7993               
    79917994            # check that urs n velocity and sts ymomentum are the same                           
    79927995            msg = 'urs n velocity is not equivalent to sts ymomentum for station %i' %j
     
    79957998                            rtol=1.0e-5, atol=1.0e-4 ), msg
    79967999
    7997         fid.close()
     8000        fid.close()
    79988001       
    79998002        os.remove(sts_name_out+'.sts')
  • anuga_core/source/anuga/shallow_water/test_forcing.py

    r7733 r7735  
    1010import numpy as num
    1111
    12 ### Helper functions
    13 ###
    1412
    1513def scalar_func_list(t, x, y):
     
    2119    return [17.7]
    2220
    23 # Variable windfield implemented using functions
     21
    2422def speed(t, x, y):
    25     """Large speeds halfway between center and edges
     23    """
     24    Variable windfield implemented using functions   
     25    Large speeds halfway between center and edges
    2626
    2727    Low speeds at center and edges
     
    248248
    249249        # Convert ASCII file to NetCDF (Which is what we really like!)
    250         from data_manager import timefile2netcdf
     250        from file_conversion import timefile2netcdf
    251251
    252252        timefile2netcdf(filename)
     
    340340
    341341        # Convert ASCII file to NetCDF (Which is what we really like!)
    342         from data_manager import timefile2netcdf
     342        from file_conversion import timefile2netcdf
    343343
    344344        timefile2netcdf(filename, time_as_seconds=True)
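Both hunks above repoint the relative import of timefile2netcdf at the new file_conversion module. A minimal usage sketch, assuming the package-level import path and a hypothetical ASCII time-series file:

    from anuga.shallow_water.file_conversion import timefile2netcdf

    # Convert a hypothetical two-column ASCII time series to NetCDF;
    # time_as_seconds=True mirrors the second call site above.
    timefile2netcdf('gauge_timeseries.txt', time_as_seconds=True)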
  • anuga_core/source/anuga/shallow_water/test_shallow_water_domain.py

    r7733 r7735  
    60706070        from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular_cross
    60716071        from anuga.shallow_water import Domain
    6072         from anuga.shallow_water.shallow_water_domain import Reflective_boundary
     6072        from anuga.shallow_water.boundaries import Reflective_boundary
    60736073        from anuga.shallow_water.shallow_water_domain import Dirichlet_boundary
    60746074        from anuga.shallow_water.shallow_water_domain import Rainfall
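After this hunk, Reflective_boundary comes from anuga.shallow_water.boundaries while Dirichlet_boundary is still imported from shallow_water_domain. A minimal sketch of the post-changeset imports in use, assuming a hypothetical domain with the standard rectangular boundary tags:

    from anuga.shallow_water.boundaries import Reflective_boundary
    from anuga.shallow_water.shallow_water_domain import Dirichlet_boundary

    Br = Reflective_boundary(domain)           # solid-wall reflection
    Bd = Dirichlet_boundary([0.5, 0.0, 0.0])   # fixed stage and momentum
    domain.set_boundary({'left': Br, 'right': Bd, 'top': Br, 'bottom': Br})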
  • anuga_core/source/anuga/shallow_water/tsh2sww.py

    r7340 r7735  
    1313from anuga.pyvolution.pmesh2domain import pmesh_to_domain_instance
    1414import time, os
    15 from anuga.pyvolution.data_manager import SWW_file
     15from anuga.pyvolution.sww_file import SWW_file
    1616from anuga.utilities.numerical_tools import mean
    1717import anuga.utilities.log as log
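SWW_file moves out of data_manager into its own sww_file module (here under the older pyvolution package path used by tsh2sww.py). A minimal sketch of the writer after this changeset, assuming a hypothetical Domain instance and the usual store-then-step pattern:

    from anuga.pyvolution.sww_file import SWW_file

    sww = SWW_file(domain)     # 'domain' is a hypothetical Domain instance
    sww.store_connectivity()   # write the mesh geometry once
    sww.store_timestep()       # append the current quantities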