Changeset 6080


Timestamp: Dec 16, 2008, 10:15:44 AM
Author: rwilson
Message: Fixed a few bugs, pep8 formatted.

File: 1 edited

  • anuga_core/source/anuga/shallow_water/data_manager.py

    r5982 r6080  
    4949"""
    5050
    51 # This file was reverted from changeset:5484 to changeset:5470 on 10th July 
     51# This file was reverted from changeset:5484 to changeset:5470 on 10th July
    5252# by Ole.
    5353
     54import os, sys
     55import csv
    5456import exceptions
    55 class TitleValueError(exceptions.Exception): pass
    56 class DataMissingValuesError(exceptions.Exception): pass
    57 class DataFileNotOpenError(exceptions.Exception): pass
    58 class DataTimeError(exceptions.Exception): pass
    59 class DataDomainError(exceptions.Exception): pass
    60 class NewQuantity(exceptions.Exception): pass
    61 
    62 
    63 
    64 import csv
    65 import os, sys
     57import string
    6658import shutil
    6759from struct import unpack
    6860import array as p_array
    69 #import time, os
    7061from os import sep, path, remove, mkdir, access, F_OK, W_OK, getcwd
    71 
    7262
    7363from Numeric import concatenate, array, Float, Int, Int32, resize, \
     
    7666     argmax, alltrue, shape, Float32, size
    7767
    78 import string
    79 
    8068from Scientific.IO.NetCDF import NetCDFFile
    81 #from shutil import copy
    8269from os.path import exists, basename, join
    8370from os import getcwd
    84 
    8571
    8672from anuga.coordinate_transforms.redfearn import redfearn, \
     
    9076from anuga.geospatial_data.geospatial_data import Geospatial_data,\
    9177     ensure_absolute
    92 from anuga.config import minimum_storable_height as default_minimum_storable_height
     78from anuga.config import minimum_storable_height as \
     79     default_minimum_storable_height
    9380from anuga.config import max_float
    9481from anuga.utilities.numerical_tools import ensure_numeric,  mean
     
    10087from anuga.abstract_2d_finite_volumes.util import get_revision_number, \
    10188     remove_lone_verts, sww2timeseries, get_centroid_values
    102      
     89
    10390from anuga.abstract_2d_finite_volumes.neighbour_mesh import segment_midpoints
    10491from anuga.load_mesh.loadASCII import export_mesh_file
     
    10693
    10794
     95######
     96# Exception classes
     97######
     98
     99class TitleValueError(exceptions.Exception): pass
     100class DataMissingValuesError(exceptions.Exception): pass
     101class DataFileNotOpenError(exceptions.Exception): pass
     102class DataTimeError(exceptions.Exception): pass
     103class DataDomainError(exceptions.Exception): pass
     104class NewQuantity(exceptions.Exception): pass
     105
     106
     107######
    108108# formula mappings
     109######
    109110
    110111quantity_formula = {'momentum':'(xmomentum**2 + ymomentum**2)**0.5',
     
    114115
    115116
    116    
     117##
     118# @brief Convert a possible filename into a standard form.
     119# @param s Filename to process.
     120# @return The new filename string.
    117121def make_filename(s):
    118122    """Transform argument string into a suitable filename
     
    128132
    129133
     134##
     135# @brief Check that a specified filesystem directory path exists.
      136# @param path The directory path to check.
     137# @param verbose True if this function is to be verbose.
     138# @note If directory path doesn't exist, it will be created.
    130139def check_dir(path, verbose=None):
    131140    """Check that specified path exists.
     
    141150    RETURN VALUE:
    142151        Verified path including trailing separator
    143 
    144152    """
    145153
     
    151159        unix = 1
    152160
    153 
     161    # add terminal separator, if it's not already there
    154162    if path[-1] != os.sep:
    155         path = path + os.sep  # Add separator for directories
    156 
    157     path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    158     if not (os.access(path,os.R_OK and os.W_OK) or path == ''):
     163        path = path + os.sep
     164
     165    # expand ~ or ~username in path
     166    path = os.path.expanduser(path)
     167
     168    # create directory if required
     169    if not (os.access(path, os.R_OK and os.W_OK) or path == ''):
    159170        try:
    160             exitcode=os.mkdir(path)
     171            exitcode = os.mkdir(path)
    161172
    162173            # Change access rights if possible
    163             #
    164174            if unix:
    165                 exitcode=os.system('chmod 775 '+path)
     175                exitcode = os.system('chmod 775 ' + path)
    166176            else:
    167                 pass  # FIXME: What about acces rights under Windows?
     177                pass  # FIXME: What about access rights under Windows?
    168178
    169179            if verbose: print 'MESSAGE: Directory', path, 'created.'
    170 
    171180        except:
    172181            print 'WARNING: Directory', path, 'could not be created.'
     
    174183                path = '/tmp/'
    175184            else:
    176                 path = 'C:'
    177 
    178             print 'Using directory %s instead' %path
    179 
    180     return(path)
    181 
    182 
    183 
     185                path = 'C:' + os.sep
     186
     187            print "Using directory '%s' instead" % path
     188
     189    return path
     190
     191
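For orientation, a minimal usage sketch of the cleaned-up check_dir (directory name illustrative, not part of the changeset):

    # check_dir() returns the verified path with a trailing os.sep,
    # creating the directory (chmod 775 on unix) if it does not exist.
    work_dir = check_dir('/tmp/anuga_output', verbose=True)
    # work_dir == '/tmp/anuga_output/'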
     192##
     193# @brief Delete directory and all sub-directories.
     194# @param path Path to the directory to delete.
    184195def del_dir(path):
    185196    """Recursively delete directory path and all its contents
    186197    """
    187 
    188     import os
    189198
    190199    if os.path.isdir(path):
    191200        for file in os.listdir(path):
    192201            X = os.path.join(path, file)
    193 
    194202
    195203            if os.path.isdir(X) and not os.path.islink(X):
     
    199207                    os.remove(X)
    200208                except:
    201                     print "Could not remove file %s" %X
     209                    print "Could not remove file %s" % X
    202210
    203211        os.rmdir(path)
    204        
    205        
    206 # ANOTHER OPTION, IF NEED IN THE FUTURE, Nick B 7/2007   
    207 def rmgeneric(path, __func__,verbose=False):
     212
     213
     214##
     215# @brief ??
     216# @param path
     217# @param __func__
     218# @param verbose True if this function is to be verbose.
      219# @note ANOTHER OPTION, IF NEEDED IN THE FUTURE, Nick B 7/2007
     220def rmgeneric(path, func, verbose=False):
    208221    ERROR_STR= """Error removing %(path)s, %(error)s """
    209222
    210223    try:
    211         __func__(path)
     224        func(path)
    212225        if verbose: print 'Removed ', path
    213226    except OSError, (errno, strerror):
    214227        print ERROR_STR % {'path' : path, 'error': strerror }
    215            
    216 def removeall(path,verbose=False):
    217 
     228
     229
     230##
     231# @brief Remove directory and all sub-directories.
     232# @param path Filesystem path to directory to remove.
     233# @param verbose True if this function is to be verbose.
     234def removeall(path, verbose=False):
    218235    if not os.path.isdir(path):
    219236        return
    220    
    221     files=os.listdir(path)
    222 
    223     for x in files:
    224         fullpath=os.path.join(path, x)
     237
     238    for x in os.listdir(path):
     239        fullpath = os.path.join(path, x)
    225240        if os.path.isfile(fullpath):
    226             f=os.remove
     241            f = os.remove
    227242            rmgeneric(fullpath, f)
    228243        elif os.path.isdir(fullpath):
    229244            removeall(fullpath)
    230             f=os.rmdir
    231             rmgeneric(fullpath, f,verbose)
    232 
    233 
    234 
     245            f = os.rmdir
     246            rmgeneric(fullpath, f, verbose)
     247
     248
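A hedged usage sketch of the refactored removeall/rmgeneric pair (path illustrative):

    # Files are removed via rmgeneric(fullpath, os.remove); sub-directories
    # are emptied recursively, then removed via rmgeneric(fullpath, os.rmdir).
    # The top-level directory itself is left in place.
    removeall('/tmp/anuga_scratch', verbose=True)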
     249##
     250# @brief Create a standard filename.
     251# @param datadir Directory where file is to be created.
     252# @param filename Filename 'stem'.
     253# @param format Format of the file, becomes filename extension.
     254# @param size Size of file, becomes part of filename.
     255# @param time Time (float), becomes part of filename.
     256# @return The complete filename path, including directory.
     257# @note The containing directory is created, if necessary.
    235258def create_filename(datadir, filename, format, size=None, time=None):
    236 
    237     import os
    238     #from anuga.config import data_dir
    239 
    240259    FN = check_dir(datadir) + filename
    241260
    242261    if size is not None:
    243         FN += '_size%d' %size
     262        FN += '_size%d' % size
    244263
    245264    if time is not None:
    246         FN += '_time%.2f' %time
     265        FN += '_time%.2f' % time
    247266
    248267    FN += '.' + format
     268
    249269    return FN
    250270
    251271
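A sketch of the naming convention create_filename implements (values illustrative):

    FN = create_filename('.', 'runoff', 'sww', size=4, time=1.5)
    # FN == './runoff_size4_time1.50.sww'; check_dir() supplies the
    # trailing separator and creates the directory if necessary.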
     272##
     273# @brief Get all files with a standard name and a given set of attributes.
     274# @param datadir Directory files must be in.
     275# @param filename Filename stem.
     276# @param format Filename extension.
     277# @param size Filename size.
      278# @return A list of filenames (including directory) that match the attributes.
    252279def get_files(datadir, filename, format, size):
    253280    """Get all file (names) with given name, size and format
     
    256283    import glob
    257284
    258     import os
    259     #from anuga.config import data_dir
    260 
    261285    dir = check_dir(datadir)
    262 
    263     pattern = dir + os.sep + filename + '_size=%d*.%s' %(size, format)
     286    pattern = dir + os.sep + filename + '_size=%d*.%s' % (size, format)
     287
    264288    return glob.glob(pattern)
    265289
    266290
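And the matching lookup via get_files (names illustrative):

    files = get_files('.', 'runoff', 'sww', 4)
    # Globs on './/runoff_size=4*.sww'.  Note this pattern uses '_size=%d'
    # while create_filename() above writes '_size%d'.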
    267 
    268 #Generic class for storing output to e.g. visualisation or checkpointing
     291##
     292# @brief Generic class for storing output to e.g. visualisation or checkpointing
    269293class Data_format:
    270294    """Generic interface to data formats
    271295    """
    272296
    273 
    274     def __init__(self, domain, extension, mode = 'w'):
    275         assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
    276                                         '''   'w' (write)'''+\
    277                                         '''   'r' (read)''' +\
    278                                         '''   'a' (append)'''
     297    ##
     298    # @brief Instantiate this instance.
     299    # @param domain
     300    # @param extension
     301    # @param mode The mode of the underlying file.
     302    def __init__(self, domain, extension, mode='w'):
     303        assert mode in ['r', 'w', 'a'], \
     304               "Mode %s must be either:\n" % mode + \
     305               "   'w' (write)\n" + \
     306               "   'r' (read)\n" + \
     307               "   'a' (append)"
    279308
    280309        #Create filename
     
    282311                                        domain.get_name(), extension)
    283312
    284         #print 'F', self.filename
    285313        self.timestep = 0
    286314        self.domain = domain
    287        
    288 
    289315
    290316        # Exclude ghosts in case this is a parallel domain
    291         self.number_of_nodes = domain.number_of_full_nodes       
     317        self.number_of_nodes = domain.number_of_full_nodes
    292318        self.number_of_volumes = domain.number_of_full_triangles
    293         #self.number_of_volumes = len(domain)       
    294 
    295 
    296 
     319        #self.number_of_volumes = len(domain)
    297320
    298321        #FIXME: Should we have a general set_precision function?
    299322
    300323
    301 
    302 #Class for storing output to e.g. visualisation
     324##
     325# @brief Class for storing output to e.g. visualisation
    303326class Data_format_sww(Data_format):
    304327    """Interface to native NetCDF format (.sww) for storing model output
     
    312335    """
    313336
    314 
    315     def __init__(self, domain, mode = 'w',\
    316                  max_size = 2000000000,
    317                  recursion = False):
     337    ##
     338    # @brief Instantiate this instance.
     339    # @param domain ??
     340    # @param mode Mode of the underlying data file.
     341    # @param max_size ??
     342    # @param recursion ??
      343    # @note Prepare the underlying data file if mode is 'w'.
     344    def __init__(self, domain, mode='w', max_size=2000000000, recursion=False):
    318345        from Scientific.IO.NetCDF import NetCDFFile
    319346        from Numeric import Int, Float, Float32
    320347
    321348        self.precision = Float32 #Use single precision for quantities
     349        self.recursion = recursion
     350        self.mode = mode
    322351        if hasattr(domain, 'max_size'):
    323352            self.max_size = domain.max_size #file size max is 2Gig
    324353        else:
    325354            self.max_size = max_size
    326         self.recursion = recursion
    327         self.mode = mode
    328 
    329         Data_format.__init__(self, domain, 'sww', mode)
    330 
    331355        if hasattr(domain, 'minimum_storable_height'):
    332356            self.minimum_storable_height = domain.minimum_storable_height
     
    334358            self.minimum_storable_height = default_minimum_storable_height
    335359
     360        # call owning constructor
     361        Data_format.__init__(self, domain, 'sww', mode)
     362
    336363        # NetCDF file definition
    337364        fid = NetCDFFile(self.filename, mode)
    338 
    339365        if mode == 'w':
    340             description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'
     366            description = 'Output from anuga.abstract_2d_finite_volumes ' \
     367                          'suitable for plotting'
    341368            self.writer = Write_sww()
    342369            self.writer.store_header(fid,
     
    355382            if domain.quantities_to_be_monitored is not None:
    356383                fid.createDimension('singleton', 1)
    357                 fid.createDimension('two', 2)               
     384                fid.createDimension('two', 2)
    358385
    359386                poly = domain.monitor_polygon
     
    363390                    fid.createVariable('extrema.polygon',
    364391                                       self.precision,
    365                                        ('polygon_length',
    366                                         'two'))
    367                     fid.variables['extrema.polygon'][:] = poly                                   
    368 
    369                    
     392                                       ('polygon_length', 'two'))
     393                    fid.variables['extrema.polygon'][:] = poly
     394
    370395                interval = domain.monitor_time_interval
    371396                if interval is not None:
     
    375400                    fid.variables['extrema.time_interval'][:] = interval
    376401
    377                
    378402                for q in domain.quantities_to_be_monitored:
    379                     #print 'doing', q
    380                     fid.createVariable(q+'.extrema', self.precision,
     403                    fid.createVariable(q + '.extrema', self.precision,
    381404                                       ('numbers_in_range',))
    382                     fid.createVariable(q+'.min_location', self.precision,
     405                    fid.createVariable(q + '.min_location', self.precision,
    383406                                       ('numbers_in_range',))
    384                     fid.createVariable(q+'.max_location', self.precision,
     407                    fid.createVariable(q + '.max_location', self.precision,
    385408                                       ('numbers_in_range',))
    386                     fid.createVariable(q+'.min_time', self.precision,
     409                    fid.createVariable(q + '.min_time', self.precision,
    387410                                       ('singleton',))
    388                     fid.createVariable(q+'.max_time', self.precision,
     411                    fid.createVariable(q + '.max_time', self.precision,
    389412                                       ('singleton',))
    390413
    391                    
    392414        fid.close()
    393415
    394 
     416    ##
     417    # @brief Store connectivity data into the underlying data file.
    395418    def store_connectivity(self):
    396419        """Specialisation of store_connectivity for net CDF format
     
    401424
    402425        from Scientific.IO.NetCDF import NetCDFFile
    403 
    404426        from Numeric import concatenate, Int
    405427
    406428        domain = self.domain
    407429
    408         #Get NetCDF
    409         fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
     430        # append to the NetCDF file
     431        fid = NetCDFFile(self.filename, 'a')
    410432
    411433        # Get the variables
     
    413435        y = fid.variables['y']
    414436        z = fid.variables['elevation']
    415 
    416437        volumes = fid.variables['volumes']
    417438
    418439        # Get X, Y and bed elevation Z
    419440        Q = domain.quantities['elevation']
    420         X,Y,Z,V = Q.get_vertex_values(xy=True,
    421                                       precision=self.precision)
    422 
    423         #
     441        X,Y,Z,V = Q.get_vertex_values(xy=True, precision=self.precision)
     442
     443        # store the connectivity data
    424444        points = concatenate( (X[:,NewAxis],Y[:,NewAxis]), axis=1 )
    425445        self.writer.store_triangulation(fid,
     
    427447                                        V.astype(volumes.typecode()),
    428448                                        Z,
    429                                         points_georeference= \
    430                                         domain.geo_reference)
    431 
    432         # Close
     449                                        points_georeference=\
     450                                            domain.geo_reference)
     451
    433452        fid.close()
    434453
    435 
     454    ##
     455    # @brief Store time and named quantities to the underlying data file.
     456    # @param names The names of the quantities to store.
     457    # @note If 'names' not supplied, store a standard set of quantities.
    436458    def store_timestep(self, names=None):
    437459        """Store time and named quantities to file
    438460        """
    439        
     461
    440462        from Scientific.IO.NetCDF import NetCDFFile
    441463        import types
    442464        from time import sleep
    443465        from os import stat
    444 
    445466        from Numeric import choose
    446 
    447467
    448468        if names is None:
    449469            # Standard shallow water wave equation quantitites in ANUGA
    450470            names = ['stage', 'xmomentum', 'ymomentum']
    451        
    452         # Get NetCDF       
     471
     472        # Get NetCDF
    453473        retries = 0
    454474        file_open = False
     
    459479                # This could happen if someone was reading the file.
    460480                # In that case, wait a while and try again
    461                 msg = 'Warning (store_timestep): File %s could not be opened'\
    462                       %self.filename
    463                 msg += ' - trying step %s again' %self.domain.time
     481                msg = 'Warning (store_timestep): File %s could not be opened' \
     482                      % self.filename
     483                msg += ' - trying step %s again' % self.domain.time
    464484                print msg
    465485                retries += 1
     
    469489
    470490        if not file_open:
    471             msg = 'File %s could not be opened for append' %self.filename
     491            msg = 'File %s could not be opened for append' % self.filename
    472492            raise DataFileNotOpenError, msg
    473 
    474 
    475493
    476494        # Check to see if the file is already too big:
    477495        time = fid.variables['time']
    478         i = len(time)+1
     496        i = len(time) + 1
    479497        file_size = stat(self.filename)[6]
    480         file_size_increase =  file_size/i
    481         if file_size + file_size_increase > self.max_size*(2**self.recursion):
     498        file_size_increase = file_size / i
     499        if file_size + file_size_increase > self.max_size * 2**self.recursion:
    482500            # In order to get the file name and start time correct,
    483501            # I change the domain.filename and domain.starttime.
     
    487505            # Write a filename addon that won't break swollens reader
    488506            # (10.sww is bad)
    489             filename_ext = '_time_%s'%self.domain.time
     507            filename_ext = '_time_%s' % self.domain.time
    490508            filename_ext = filename_ext.replace('.', '_')
    491            
     509
    492510            # Remember the old filename, then give domain a
    493511            # name with the extension
    494512            old_domain_filename = self.domain.get_name()
    495513            if not self.recursion:
    496                 self.domain.set_name(old_domain_filename+filename_ext)
    497 
     514                self.domain.set_name(old_domain_filename + filename_ext)
    498515
    499516            # Change the domain starttime to the current time
     
    502519
    503520            # Build a new data_structure.
    504             next_data_structure=\
    505                 Data_format_sww(self.domain, mode=self.mode,\
    506                                 max_size = self.max_size,\
    507                                 recursion = self.recursion+1)
     521            next_data_structure = Data_format_sww(self.domain, mode=self.mode,
     522                                                  max_size=self.max_size,
     523                                                  recursion=self.recursion+1)
    508524            if not self.recursion:
    509                 print '    file_size = %s'%file_size
    510                 print '    saving file to %s'%next_data_structure.filename
     525                print '    file_size = %s' % file_size
     526                print '    saving file to %s' % next_data_structure.filename
     527
    511528            #set up the new data_structure
    512529            self.domain.writer = next_data_structure
     
    520537            #restore the old starttime and filename
    521538            self.domain.starttime = old_domain_starttime
    522             self.domain.set_name(old_domain_filename)           
     539            self.domain.set_name(old_domain_filename)
    523540        else:
    524541            self.recursion = False
     
    534551                names = [names]
    535552
    536             if 'stage' in names and 'xmomentum' in names and \
    537                'ymomentum' in names:
    538 
     553            if 'stage' in names \
     554               and 'xmomentum' in names \
     555               and 'ymomentum' in names:
    539556                # Get stage, elevation, depth and select only those
    540557                # values where minimum_storable_height is exceeded
    541558                Q = domain.quantities['stage']
    542                 A, _ = Q.get_vertex_values(xy = False,
    543                                            precision = self.precision)
     559                A, _ = Q.get_vertex_values(xy=False, precision=self.precision)
    544560                z = fid.variables['elevation']
    545561
    546                 storable_indices = A-z[:] >= self.minimum_storable_height
     562                storable_indices = (A-z[:] >= self.minimum_storable_height)
    547563                stage = choose(storable_indices, (z[:], A))
    548                
     564
    549565                # Define a zero vector of same size and type as A
    550566                # for use with momenta
    551567                null = zeros(size(A), A.typecode())
    552                
     568
    553569                # Get xmomentum where depth exceeds minimum_storable_height
    554570                Q = domain.quantities['xmomentum']
    555                 xmom, _ = Q.get_vertex_values(xy = False,
    556                                               precision = self.precision)
     571                xmom, _ = Q.get_vertex_values(xy=False,
     572                                              precision=self.precision)
    557573                xmomentum = choose(storable_indices, (null, xmom))
    558                
     574
    559575
    560576                # Get ymomentum where depth exceeds minimum_storable_height
    561577                Q = domain.quantities['ymomentum']
    562                 ymom, _ = Q.get_vertex_values(xy = False,
    563                                               precision = self.precision)
    564                 ymomentum = choose(storable_indices, (null, ymom))               
    565                
    566                 # Write quantities to NetCDF
    567                 self.writer.store_quantities(fid, 
     578                ymom, _ = Q.get_vertex_values(xy=False,
     579                                              precision=self.precision)
     580                ymomentum = choose(storable_indices, (null, ymom))
     581
      582                # Write quantities to underlying data file
     583                self.writer.store_quantities(fid,
    568584                                             time=self.domain.time,
    569585                                             sww_precision=self.precision,
     
    572588                                             ymomentum=ymomentum)
    573589            else:
    574                 msg = 'Quantities stored must be: stage, xmomentum, ymomentum.'
    575                 msg += ' Instead I got: ' + str(names)
     590                msg = 'Quantities stored must be: stage, xmomentum, ymomentum, '
     591                msg += 'but I got: ' + str(names)
    576592                raise Exception, msg
    577            
    578 
    579593
    580594            # Update extrema if requested
     
    582596            if domain.quantities_to_be_monitored is not None:
    583597                for q, info in domain.quantities_to_be_monitored.items():
    584 
    585598                    if info['min'] is not None:
    586599                        fid.variables[q + '.extrema'][0] = info['min']
    587                         fid.variables[q + '.min_location'][:] =\
     600                        fid.variables[q + '.min_location'][:] = \
    588601                                        info['min_location']
    589602                        fid.variables[q + '.min_time'][0] = info['min_time']
    590                        
     603
    591604                    if info['max'] is not None:
    592605                        fid.variables[q + '.extrema'][1] = info['max']
    593                         fid.variables[q + '.max_location'][:] =\
     606                        fid.variables[q + '.max_location'][:] = \
    594607                                        info['max_location']
    595608                        fid.variables[q + '.max_time'][0] = info['max_time']
    596 
    597            
    598609
    599610            # Flush and close
     
    602613
    603614
    604 
    605 # Class for handling checkpoints data
     615##
     616# @brief Class for handling checkpoints data
    606617class Data_format_cpt(Data_format):
    607618    """Interface to native NetCDF format (.cpt)
    608619    """
    609620
    610 
    611     def __init__(self, domain, mode = 'w'):
     621    ##
     622    # @brief Initialize this instantiation.
     623    # @param domain ??
     624    # @param mode Mode of underlying data file (default WRITE).
     625    def __init__(self, domain, mode='w'):
    612626        from Scientific.IO.NetCDF import NetCDFFile
    613627        from Numeric import Int, Float, Float
     
    617631        Data_format.__init__(self, domain, 'sww', mode)
    618632
    619 
    620633        # NetCDF file definition
    621634        fid = NetCDFFile(self.filename, mode)
    622 
    623635        if mode == 'w':
    624636            #Create new file
     
    646658                                                'number_of_vertices'))
    647659
    648             fid.createVariable('time', self.precision,
    649                                ('number_of_timesteps',))
     660            fid.createVariable('time', self.precision, ('number_of_timesteps',))
    650661
    651662            #Allocate space for all quantities
     
    655666                                    'number_of_points'))
    656667
    657         #Close
    658668        fid.close()
    659669
    660 
     670    ##
     671    # @brief Store connectivity data to underlying data file.
    661672    def store_checkpoint(self):
    662         """
    663         Write x,y coordinates of triangles.
     673        """Write x,y coordinates of triangles.
    664674        Write connectivity (
    665675        constituting
     
    668678
    669679        from Scientific.IO.NetCDF import NetCDFFile
    670 
    671680        from Numeric import concatenate
    672681
     
    674683
    675684        #Get NetCDF
    676         fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
     685        fid = NetCDFFile(self.filename, 'a')
    677686
    678687        # Get the variables
     
    684693        # Get X, Y and bed elevation Z
    685694        Q = domain.quantities['elevation']
    686         X,Y,Z,V = Q.get_vertex_values(xy=True,
    687                       precision = self.precision)
    688 
    689 
     695        X,Y,Z,V = Q.get_vertex_values(xy=True, precision=self.precision)
    690696
    691697        x[:] = X.astype(self.precision)
     
    695701        volumes[:] = V
    696702
    697         #Close
    698703        fid.close()
    699704
    700 
     705    ##
      706    # @brief Store time and named quantities to underlying data file.
     707    # @param name
    701708    def store_timestep(self, name):
    702709        """Store time and named quantity to file
    703710        """
     711
    704712        from Scientific.IO.NetCDF import NetCDFFile
    705713        from time import sleep
     
    710718        while not file_open and retries < 10:
    711719            try:
    712                 fid = NetCDFFile(self.filename, 'a') #Open existing file
     720                fid = NetCDFFile(self.filename, 'a')
    713721            except IOError:
    714722                #This could happen if someone was reading the file.
    715723                #In that case, wait a while and try again
    716                 msg = 'Warning (store_timestep): File %s could not be opened'\
    717                   %self.filename
    718                 msg += ' - trying again'
     724                msg = 'Warning (store_timestep): File %s could not be opened' \
     725                      ' - trying again' % self.filename
    719726                print msg
    720727                retries += 1
     
    724731
    725732        if not file_open:
    726             msg = 'File %s could not be opened for append' %self.filename
    727             raise DataFileNotOPenError, msg
    728 
     733            msg = 'File %s could not be opened for append' % self.filename
     734            raise DataFileNotOpenError, msg
    729735
    730736        domain = self.domain
     
    740746        # Get quantity
    741747        Q = domain.quantities[name]
    742         A,V = Q.get_vertex_values(xy=False,
    743                                   precision = self.precision)
     748        A,V = Q.get_vertex_values(xy=False, precision=self.precision)
    744749
    745750        stage[i,:] = A.astype(self.precision)
     
    750755
    751756
    752 #### NED is national exposure database (name changed to NEXIS)
    753    
     757##
     758# @brief Class for National Exposure Database storage (NEXIS).
     759
    754760LAT_TITLE = 'LATITUDE'
    755761LONG_TITLE = 'LONGITUDE'
    756762X_TITLE = 'x'
    757763Y_TITLE = 'y'
     764
    758765class Exposure_csv:
     766
     767    ##
     768    # @brief Instantiate this instance.
     769    # @param file_name Name of underlying data file.
     770    # @param latitude_title ??
     771    # @param longitude_title ??
     772    # @param is_x_y_locations ??
     773    # @param x_title ??
     774    # @param y_title ??
     775    # @param refine_polygon ??
     776    # @param title_check_list ??
    759777    def __init__(self,file_name, latitude_title=LAT_TITLE,
    760778                 longitude_title=LONG_TITLE, is_x_y_locations=None,
     
    772790           each column is a 'set' of data
    773791
    774         Feel free to use/expand it to read other csv files.
    775            
    776            
     792        Feel free to use/expand it to read other csv files.
     793
    777794        It is not for adding and deleting rows
    778        
     795
    779796        Can geospatial handle string attributes? It's not made for them.
    780797        Currently it can't load and save string att's.
     
    785802
    786803        The location info is in the geospatial attribute.
    787        
    788        
    789804        """
     805
    790806        self._file_name = file_name
    791807        self._geospatial = None #
     
    794810        #The keys are the column titles.
    795811        #The values are lists of column data
    796        
     812
    797813        # self._title_index_dic is a dictionary.
    798814        #The keys are the column titles.
     
    801817            csv2dict(self._file_name, title_check_list=title_check_list)
    802818        try:
    803             #Have code here that handles caps or lower 
     819            #Have code here that handles caps or lower
    804820            lats = self._attribute_dic[latitude_title]
    805821            longs = self._attribute_dic[longitude_title]
    806            
    807822        except KeyError:
    808823            # maybe a warning..
     
    812827            pass
    813828        else:
    814             self._geospatial = Geospatial_data(latitudes = lats,
    815                  longitudes = longs)
     829            self._geospatial = Geospatial_data(latitudes=lats,
     830                                               longitudes=longs)
    816831
    817832        if is_x_y_locations is True:
     
    828843            else:
    829844                self._geospatial = Geospatial_data(data_points=points)
    830            
     845
    831846        # create a list of points that are in the refining_polygon
    832847        # described by a list of indexes representing the points
    833848
     849    ##
     850    # @brief Create a comparison method.
     851    # @param self This object.
     852    # @param other The other object.
     853    # @return True if objects are 'same'.
    834854    def __cmp__(self, other):
    835         #print "self._attribute_dic",self._attribute_dic
    836         #print "other._attribute_dic",other._attribute_dic
    837         #print "self._title_index_dic", self._title_index_dic
    838         #print "other._title_index_dic", other._title_index_dic
    839        
    840         #check that a is an instance of this class
     855        #check that 'other' is an instance of this class
    841856        if isinstance(self, type(other)):
    842857            result = cmp(self._attribute_dic, other._attribute_dic)
    843             if result <>0:
     858            if result <> 0:
    844859                return result
    845             # The order of the columns is important. Therefore..
     860
     861            # The order of the columns is important. Therefore..
    846862            result = cmp(self._title_index_dic, other._title_index_dic)
    847             if result <>0:
     863            if result <> 0:
    848864                return result
    849             for self_ls, other_ls in map(None,self._attribute_dic, \
    850                     other._attribute_dic):
     865            for self_ls, other_ls in map(None, self._attribute_dic,
     866                                         other._attribute_dic):
    851867                result = cmp(self._attribute_dic[self_ls],
    852868                             other._attribute_dic[other_ls])
    853                 if result <>0:
     869                if result <> 0:
    854870                    return result
    855871            return 0
    856872        else:
    857873            return 1
    858    
    859 
     874
     875    ##
     876    # @brief Get a list of column values given a column name.
     877    # @param column_name The name of the column to get values from.
     878    # @param use_refind_polygon Unused??
    860879    def get_column(self, column_name, use_refind_polygon=False):
    861880        """
     
    865884        do this to change a list of strings to a list of floats
    866885        time = [float(x) for x in time]
    867        
     886
    868887        Not implemented:
    869888        if use_refind_polygon is True, only return values in the
    870889        refined polygon
    871890        """
     891
    872892        if not self._attribute_dic.has_key(column_name):
    873             msg = 'Therer is  no column called %s!' %column_name
     893            msg = 'There is no column called %s!' % column_name
    874894            raise TitleValueError, msg
     895
    875896        return self._attribute_dic[column_name]
    876897
    877 
    878     def get_value(self, value_column_name,
    879                   known_column_name,
    880                   known_values,
    881                   use_refind_polygon=False):
     898    ##
     899    # @brief ??
     900    # @param value_column_name ??
     901    # @param known_column_name ??
     902    # @param known_values ??
     903    # @param use_refind_polygon ??
     904    def get_value(self, value_column_name, known_column_name,
     905                  known_values, use_refind_polygon=False):
    882906        """
    883907        Do linear interpolation on the known_colum, using the known_value,
    884908        to return a value of the column_value_name.
    885909        """
     910
    886911        pass
    887912
    888 
     913    ##
     914    # @brief Get a geospatial object that describes the locations.
     915    # @param use_refind_polygon Unused??
    889916    def get_location(self, use_refind_polygon=False):
    890917        """
     
    893920
    894921        Note, if there is not location info, this returns None.
    895        
     922
    896923        Not implemented:
    897924        if use_refind_polygon is True, only return values in the
    898925        refined polygon
    899926        """
     927
    900928        return self._geospatial
    901929
     930    ##
     931    # @brief Add column to 'end' of CSV data.
     932    # @param column_name The new column name.
     933    # @param column_values The new column values.
     934    # @param overwrite If True, overwrites last column, doesn't add at end.
    902935    def set_column(self, column_name, column_values, overwrite=False):
    903936        """
     
    906939
    907940        Set overwrite to True if you want to overwrite a column.
    908        
     941
    909942        Note, in column_name white space is removed and case is not checked.
    910943        Precondition
    911944        The column_name and column_values cannot have comma's in it.
    912945        """
    913        
     946
     947        # sanity checks
    914948        value_row_count = \
    915             len(self._attribute_dic[self._title_index_dic.keys()[0]])
    916         if len(column_values) <> value_row_count: 
     949                len(self._attribute_dic[self._title_index_dic.keys()[0]])
     950        if len(column_values) <> value_row_count:
    917951            msg = 'The number of column values must equal the number of rows.'
    918952            raise DataMissingValuesError, msg
    919        
     953
     954        # check new column name isn't already used, and we aren't overwriting
    920955        if self._attribute_dic.has_key(column_name):
    921956            if not overwrite:
    922                 msg = 'Column name %s already in use!' %column_name
     957                msg = 'Column name %s already in use!' % column_name
    923958                raise TitleValueError, msg
    924959        else:
    925960            # New title.  Add it to the title index.
    926961            self._title_index_dic[column_name] = len(self._title_index_dic)
     962
    927963        self._attribute_dic[column_name] = column_values
    928         #print "self._title_index_dic[column_name]",self._title_index_dic
    929 
     964
     965    ##
     966    # @brief Save the exposure CSV  file.
     967    # @param file_name If supplied, use this filename, not original.
    930968    def save(self, file_name=None):
    931969        """
    932970        Save the exposure csv file
    933971        """
     972
    934973        if file_name is None:
    935974            file_name = self._file_name
    936        
    937         fd = open(file_name,'wb')
     975
     976        fd = open(file_name, 'wb')
    938977        writer = csv.writer(fd)
    939        
     978
    940979        #Write the title to a cvs file
    941         line = [None]* len(self._title_index_dic)
     980        line = [None] * len(self._title_index_dic)
    942981        for title in self._title_index_dic.iterkeys():
    943             line[self._title_index_dic[title]]= title
     982            line[self._title_index_dic[title]] = title
    944983        writer.writerow(line)
    945        
     984
    946985        # Write the values to a cvs file
    947986        value_row_count = \
    948             len(self._attribute_dic[self._title_index_dic.keys()[0]])
     987                len(self._attribute_dic[self._title_index_dic.keys()[0]])
    949988        for row_i in range(value_row_count):
    950             line = [None]* len(self._title_index_dic)
     989            line = [None] * len(self._title_index_dic)
    951990            for title in self._title_index_dic.iterkeys():
    952                 line[self._title_index_dic[title]]= \
     991                line[self._title_index_dic[title]] = \
    953992                     self._attribute_dic[title][row_i]
    954993            writer.writerow(line)
    955994
    956995
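A hedged usage sketch of Exposure_csv (file and column names illustrative):

    # Load an exposure file, read a column, append a derived column, save.
    exposure = Exposure_csv('exposure.csv')
    lats = exposure.get_column(LAT_TITLE)    # column values are strings
    depths = [0.0] * len(lats)               # hypothetical new column
    exposure.set_column('MAX_INUNDATION_DEPTH', depths)
    exposure.save(file_name='exposure_modified.csv')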
     996##
     997# @brief Convert CSV file to a dictionary of arrays.
     998# @param file_name The path to the file to read.
    957999def csv2array(file_name):
    958     """Convert CSV files of the form
    959    
     1000    """
     1001    Convert CSV files of the form:
     1002
    9601003    time, discharge, velocity
    9611004    0.0,  1.2,       0.0
    9621005    0.1,  3.2,       1.1
    9631006    ...
    964    
     1007
    9651008    to a dictionary of numeric arrays.
    966    
    967    
     1009
     1010
    9681011    See underlying function csv2dict for more details.
    969    
    970     """
    971    
    972    
     1012    """
     1013
    9731014    X, _ = csv2dict(file_name)
    974    
     1015
    9751016    Y = {}
    9761017    for key in X.keys():
    9771018        Y[key] = array([float(x) for x in X[key]])
    978        
    979     return Y   
    980    
    981            
     1019
     1020    return Y
     1021
     1022
     1023##
     1024# @brief Read a CSV file and convert to a dictionary of {key: column}.
     1025# @param file_name The path to the file to read.
     1026# @param title_check_list List of titles that *must* be columns in the file.
     1027# @return Two dicts: ({key:column}, {title:index}).
     1028# @note WARNING: Values are returned as strings.
    9821029def csv2dict(file_name, title_check_list=None):
    9831030    """
    984     Load in the csv as a dic, title as key and column info as value, .
     1031    Load in the csv as a dic, title as key and column info as value.
    9851032    Also, create a dic, title as key and column index as value,
    986     to keep track of the column order. 
     1033    to keep track of the column order.
    9871034
    9881035    Two dictionaries are returned.
    989    
     1036
    9901037    WARNING: Values are returned as strings.
    991     do this to change a list of strings to a list of floats
    992         time = [float(x) for x in time]
    993 
    994        
    995     """
    996    
    997     #
     1038             Do this to change a list of strings to a list of floats
     1039                 time = [float(x) for x in time]
     1040    """
     1041
    9981042    attribute_dic = {}
    9991043    title_index_dic = {}
    10001044    titles_stripped = [] # list of titles
     1045
    10011046    reader = csv.reader(file(file_name))
    10021047
     
    10061051        titles_stripped.append(title.strip())
    10071052        title_index_dic[title.strip()] = i
    1008     title_count = len(titles_stripped)       
    1009     #print "title_index_dic",title_index_dic
     1053    title_count = len(titles_stripped)
     1054
     1055    # check required columns
    10101056    if title_check_list is not None:
    10111057        for title_check in title_check_list:
    1012             #msg = "Reading error.  This row is not present ", title_check
     1058            #msg = "Reading error. This row is not present ", title_check
    10131059            #assert title_index_dic.has_key(title_check), msg
    10141060            if not title_index_dic.has_key(title_check):
    10151061                #reader.close()
    1016                 msg = "Reading error.  This row is not present ", \
    1017                       title_check                     
     1062                msg = "Reading error. This row is not present ", title_check
    10181063                raise IOError, msg
    1019                
    1020    
    1021    
    1022     #create a dic of colum values, indexed by column title
     1064
     1065    #create a dic of column values, indexed by column title
    10231066    for line in reader:
    10241067        if len(line) <> title_count:
    10251068            raise IOError #FIXME make this nicer
    10261069        for i, value in enumerate(line):
    1027             attribute_dic.setdefault(titles_stripped[i],[]).append(value)
    1028        
     1070            attribute_dic.setdefault(titles_stripped[i], []).append(value)
     1071
    10291072    return attribute_dic, title_index_dic
    10301073
    10311074
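A sketch of both CSV helpers on a small file (name and contents illustrative):

    # Given 'flow.csv' containing:
    #     time,discharge,velocity
    #     0.0,1.2,0.0
    #     0.1,3.2,1.1
    attrs, index = csv2dict('flow.csv')
    # attrs['time'] == ['0.0', '0.1'] (strings), index['discharge'] == 1
    Y = csv2array('flow.csv')
    # Y['time'] is the Numeric array([0.0, 0.1])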
    1032 #Auxiliary
    1033 def write_obj(filename,x,y,z):
    1034     """Store x,y,z vectors into filename (obj format)
     1075##
     1076# @brief
     1077# @param filename
     1078# @param x
     1079# @param y
     1080# @param z
     1081def write_obj(filename, x, y, z):
     1082    """Store x,y,z vectors into filename (obj format).
     1083
    10351084       Vectors are assumed to have dimension (M,3) where
    10361085       M corresponds to the number elements.
     
    10401089
    10411090       e.g. the x coordinate of vertex 1 of element i is in x[i,1]
    1042 
    1043     """
    1044     #print 'Writing obj to %s' % filename
     1091    """
    10451092
    10461093    import os.path
     
    10521099        FN = filename + '.obj'
    10531100
    1054 
    10551101    outfile = open(FN, 'wb')
    10561102    outfile.write("# Triangulation as an obj file\n")
    10571103
    10581104    M, N = x.shape
    1059     assert N==3  #Assuming three vertices per element
     1105    assert N == 3  #Assuming three vertices per element
    10601106
    10611107    for i in range(M):
    10621108        for j in range(N):
    1063             outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))
     1109            outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))
    10641110
    10651111    for i in range(M):
    1066         base = i*N
    1067         outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))
     1112        base = i * N
     1113        outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))
    10681114
    10691115    outfile.close()
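A minimal sketch of write_obj on two triangles (data illustrative):

    from Numeric import array
    x = array([[0.0, 1.0, 0.0], [1.0, 1.0, 0.0]])   # one row per triangle
    y = array([[0.0, 0.0, 1.0], [0.0, 1.0, 1.0]])
    z = array([[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]])
    write_obj('tri', x, y, z)    # '.obj' is appended when missing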
     
    10741120########################################################
    10751121
     1122##
     1123# @brief Convert SWW data to OBJ data.
     1124# @param basefilename Stem of filename, needs size and extension added.
     1125# @param size The number of lines to write.
    10761126def sww2obj(basefilename, size):
    10771127    """Convert netcdf based data output to obj
    10781128    """
     1129
    10791130    from Scientific.IO.NetCDF import NetCDFFile
    1080 
    10811131    from Numeric import Float, zeros
    10821132
    1083     #Get NetCDF
     1133    # Get NetCDF
    10841134    FN = create_filename('.', basefilename, 'sww', size)
    10851135    print 'Reading from ', FN
    10861136    fid = NetCDFFile(FN, 'r')  #Open existing file for read
    1087 
    10881137
    10891138    # Get the variables
     
    11051154            zz[i,j] = z[i+j*M]
    11061155
    1107     #Write obj for bathymetry
     1156    # Write obj for bathymetry
    11081157    FN = create_filename('.', basefilename, 'obj', size)
    11091158    write_obj(FN,xx,yy,zz)
    11101159
    1111 
    1112     #Now read all the data with variable information, combine with
    1113     #x,y info and store as obj
    1114 
     1160    # Now read all the data with variable information, combine with
     1161    # x,y info and store as obj
    11151162    for k in range(len(time)):
    11161163        t = time[k]
     
    11211168                zz[i,j] = stage[k,i+j*M]
    11221169
    1123 
    11241170        #Write obj for variable data
    11251171        #FN = create_filename(basefilename, 'obj', size, time=t)
    11261172        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
    1127         write_obj(FN,xx,yy,zz)
    1128 
    1129 
     1173        write_obj(FN, xx, yy, zz)
     1174
     1175
     1176##
     1177# @brief
     1178# @param basefilename Stem of filename, needs size and extension added.
    11301179def dat2obj(basefilename):
    11311180    """Convert line based data output to obj
     
    11351184    import glob, os
    11361185    from anuga.config import data_dir
    1137 
    1138 
    1139     #Get bathymetry and x,y's
     1186    from Numeric import zeros, Float
     1187
     1188    # Get bathymetry and x,y's
    11401189    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()
    1141 
    1142     from Numeric import zeros, Float
    11431190
    11441191    M = len(lines)  #Number of lines
     
    11471194    z = zeros((M,3), Float)
    11481195
    1149     ##i = 0
    11501196    for i, line in enumerate(lines):
    11511197        tokens = line.split()
    1152         values = map(float,tokens)
     1198        values = map(float, tokens)
    11531199
    11541200        for j in range(3):
     
    11571203            z[i,j] = values[j*3+2]
    11581204
    1159         ##i += 1
    1160 
    1161 
    1162     #Write obj for bathymetry
    1163     write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)
    1164 
    1165 
    1166     #Now read all the data files with variable information, combine with
    1167     #x,y info
    1168     #and store as obj
    1169 
    1170     files = glob.glob(data_dir+os.sep+basefilename+'*.dat')
    1171 
     1205    # Write obj for bathymetry
     1206    write_obj(data_dir + os.sep + basefilename + '_geometry', x, y, z)
     1207
     1208    # Now read all the data files with variable information, combine with
     1209    # x,y info and store as obj.
     1210
     1211    files = glob.glob(data_dir + os.sep + basefilename + '*.dat')
    11721212    for filename in files:
    11731213        print 'Processing %s' % filename
    11741214
    1175         lines = open(data_dir+os.sep+filename,'r').readlines()
     1215        lines = open(data_dir + os.sep + filename, 'r').readlines()
    11761216        assert len(lines) == M
    11771217        root, ext = os.path.splitext(filename)
    11781218
    1179         #Get time from filename
     1219        # Get time from filename
    11801220        i0 = filename.find('_time=')
    11811221        if i0 == -1:
     
    11911231            raise DataTimeError, 'Hmmmm'
    11921232
    1193 
    1194 
    1195         ##i = 0
    11961233        for i, line in enumerate(lines):
    11971234            tokens = line.split()
     
    12011238                z[i,j] = values[j]
    12021239
    1203             ##i += 1
    1204 
    1205         #Write obj for variable data
    1206         write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
    1207 
    1208 
    1209 def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
     1240        # Write obj for variable data
     1241        write_obj(data_dir + os.sep + basefilename + '_time=%.4f' % t, x, y, z)
     1242
     1243
     1244##
     1245# @brief Filter data file, selecting timesteps first:step:last.
     1246# @param filename1 Data file to filter.
     1247# @param filename2 File to write filtered timesteps to.
     1248# @param first First timestep.
     1249# @param last Last timestep.
     1250# @param step Timestep stride.
     1251def filter_netcdf(filename1, filename2, first=0, last=None, step=1):
    12101252    """Read netcdf filename1, pick timesteps first:step:last and save to
    12111253    netcdf file filename2
    12121254    """
     1255
    12131256    from Scientific.IO.NetCDF import NetCDFFile
    12141257
    1215     #Get NetCDF
     1258    # Get NetCDF
    12161259    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    12171260    outfile = NetCDFFile(filename2, 'w')  #Open new file
    12181261
    1219 
    1220     #Copy dimensions
     1262    # Copy dimensions
    12211263    for d in infile.dimensions:
    12221264        outfile.createDimension(d, infile.dimensions[d])
    12231265
     1266    # Copy variable definitions
    12241267    for name in infile.variables:
    12251268        var = infile.variables[name]
    12261269        outfile.createVariable(name, var.typecode(), var.dimensions)
    12271270
    1228 
    1229     #Copy the static variables
     1271    # Copy the static variables
    12301272    for name in infile.variables:
    12311273        if name == 'time' or name == 'stage':
    12321274            pass
    12331275        else:
    1234             #Copy
    12351276            outfile.variables[name][:] = infile.variables[name][:]
    12361277
    1237     #Copy selected timesteps
     1278    # Copy selected timesteps
    12381279    time = infile.variables['time']
    12391280    stage = infile.variables['stage']
     
    12471288    selection = range(first, last, step)
    12481289    for i, j in enumerate(selection):
    1249         print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
     1290        print 'Copying timestep %d of %d (%f)' % (j, last-first, time[j])
    12501291        newtime[i] = time[j]
    12511292        newstage[i,:] = stage[j,:]
    12521293
    1253     #Close
     1294    # Close
    12541295    infile.close()
    12551296    outfile.close()
    12561297
    12571298
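A typical call, hedged (filenames illustrative), keeping every second timestep:

    filter_netcdf('run_full.sww', 'run_thin.sww', first=0, step=2)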
     1299##
     1300# @brief Return an instance of the data-format class matching domain.format.
     1301# @param domain Domain object; its format attribute (eg 'sww') selects the class.
     1302# @param mode The mode to open the domain in.
     1303# @return A class instance of required domain and mode.
    12581304#Get data objects
    12591305def get_dataobject(domain, mode='w'):
     
    12611307    """
    12621308
    1263     cls = eval('Data_format_%s' %domain.format)
     1309    cls = eval('Data_format_%s' % domain.format)
    12641310    return cls(domain, mode)
    12651311
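get_dataobject() resolves the class with eval() on domain.format. A sketch of an eval-free equivalent, assuming the Data_format_sww class defined elsewhere in this module is the only format in use:

    # Sketch only: explicit lookup table instead of eval().
    def get_dataobject_explicit(domain, mode='w'):
        format_classes = {'sww': Data_format_sww}   # extend per available formats
        return format_classes[domain.format](domain, mode)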
    12661312
    1267 
    1268 
     1313##
     1314# @brief Convert DEM data to PTS data.
     1315# @param basename_in Stem of input filename.
     1316# @param basename_out Stem of output filename.
     1317# @param easting_min Minimum easting of the bounding box.
     1318# @param easting_max Maximum easting of the bounding box.
     1319# @param northing_min Minimum northing of the bounding box.
     1320# @param northing_max Maximum northing of the bounding box.
     1321# @param use_cache True if caching is to be used.
     1322# @param verbose True if this function is to be verbose.
     1323# @return
    12691324def dem2pts(basename_in, basename_out=None,
    12701325            easting_min=None, easting_max=None,
     
    12891344    """
    12901345
    1291 
    1292 
    12931346    kwargs = {'basename_out': basename_out,
    12941347              'easting_min': easting_min,
     
    13101363
    13111364
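A usage sketch for dem2pts(); the stem and bounding box values are hypothetical:

    # Reads stem.dem, writes stem.pts cropped to the given bounding box.
    dem2pts('stem',
            easting_min=358000.0, easting_max=362000.0,
            northing_min=6182000.0, northing_max=6186000.0,
            use_cache=False, verbose=True)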
     1365##
     1366# @brief Convert DEM data to PTS data (helper for dem2pts()).
     1367# @param basename_in Stem of input filename.
     1368# @param basename_out Stem of output filename.
     1369# @param verbose True if this function is to be verbose.
     1370# @param easting_min Minimum easting of the bounding box.
     1371# @param easting_max Maximum easting of the bounding box.
     1372# @param northing_min Minimum northing of the bounding box.
     1373# @param northing_max Maximum northing of the bounding box.
    13121374def _dem2pts(basename_in, basename_out=None, verbose=False,
    13131375            easting_min=None, easting_max=None,
     
    13271389
    13281390    # Get NetCDF
    1329     infile = NetCDFFile(root + '.dem', 'r')  # Open existing netcdf file for read
     1391    infile = NetCDFFile(root + '.dem', 'r')
    13301392
    13311393    if verbose: print 'Reading DEM from %s' %(root + '.dem')
     
    13481410    units = infile.units
    13491411
    1350 
    13511412    # Get output file
    13521413    if basename_out is None:
     
    13561417
    13571418    if verbose: print 'Store to NetCDF file %s' %ptsname
     1419
    13581420    # NetCDF file definition
    13591421    outfile = NetCDFFile(ptsname, 'w')
     
    13611423    # Create new file
    13621424    outfile.institution = 'Geoscience Australia'
    1363     outfile.description = 'NetCDF pts format for compact and portable storage ' +\
    1364                       'of spatial point data'
     1425    outfile.description = 'NetCDF pts format for compact and portable ' \
     1426                          'storage of spatial point data'
     1427
    13651428    # Assign default values
    13661429    if easting_min is None: easting_min = xllcorner
     
    13831446    outfile.datum = datum
    13841447    outfile.units = units
    1385 
    13861448
    13871449    # Grid info (FIXME: probably not going to be used, but heck)
     
    14041466        for j in range(ncols):
    14051467            x = j*cellsize + xllcorner
    1406             if easting_min <= x <= easting_max and \
    1407                northing_min <= y <= northing_max:
     1468            if easting_min <= x <= easting_max \
     1469               and northing_min <= y <= northing_max:
    14081470                thisj = j
    14091471                thisi = i
    1410                 if dem_elevation_r[i,j] == NODATA_value: nn += 1
     1472                if dem_elevation_r[i,j] == NODATA_value:
     1473                    nn += 1
    14111474
    14121475                if k == 0:
    14131476                    i1_0 = i
    14141477                    j1_0 = j
     1478
    14151479                k += 1
    14161480
     
    14451509
    14461510    lenv = index2-index1+1
     1511
    14471512    # Store data
    14481513    global_index = 0
    14491514    # for i in range(nrows):
    1450     for i in range(i1_0,thisi+1,1):
    1451         if verbose and i%((nrows+10)/10)==0:
    1452             print 'Processing row %d of %d' %(i, nrows)
     1515    for i in range(i1_0, thisi+1, 1):
     1516        if verbose and i % ((nrows+10)/10) == 0:
     1517            print 'Processing row %d of %d' % (i, nrows)
    14531518
    14541519        lower_index = global_index
     
    14571522        no_NODATA = sum(v == NODATA_value)
    14581523        if no_NODATA > 0:
    1459             newcols = lenv - no_NODATA # ncols_in_bounding_box - no_NODATA
     1524            newcols = lenv - no_NODATA  # ncols_in_bounding_box - no_NODATA
    14601525        else:
    1461             newcols = lenv # ncols_in_bounding_box
     1526            newcols = lenv              # ncols_in_bounding_box
    14621527
    14631528        telev = zeros(newcols, Float)
     
    14691534        #for j in range(ncols):
    14701535        for j in range(j1_0, index2+1, 1):
    1471 
    14721536            x = j*cellsize + xllcorner
    1473             if easting_min <= x <= easting_max and \
    1474                northing_min <= y <= northing_max and \
    1475                dem_elevation_r[i,j] <> NODATA_value:
    1476                 tpoints[local_index, :] = [x-easting_min,y-northing_min]
     1537            if easting_min <= x <= easting_max \
     1538               and northing_min <= y <= northing_max \
     1539               and dem_elevation_r[i,j] <> NODATA_value:
     1540                tpoints[local_index, :] = [x-easting_min, y-northing_min]
    14771541                telev[local_index] = dem_elevation_r[i, j]
    14781542                global_index += 1
     
    14911555
    14921556
    1493 
     1557##
     1558# @brief Return block of surface lines for each cross section.
     1559# @param lines Iterable of text lines to process.
     1560# @note BROKEN?  UNUSED?
    14941561def _read_hecras_cross_sections(lines):
    14951562    """Return block of surface lines for each cross section
     
    15021569    reading_surface = False
    15031570    for i, line in enumerate(lines):
    1504 
    15051571        if len(line.strip()) == 0:    #Ignore blanks
    15061572            continue
     
    15231589
    15241590
    1525 
    1526 
     1591##
     1592# @brief Convert HECRAS (.sdf) file to PTS file.
     1593# @param basename_in Stem of input filename.
     1594# @param basename_out Stem of output filename.
     1595# @param verbose True if this function is to be verbose.
    15271596def hecras_cross_sections2pts(basename_in,
    15281597                              basename_out=None,
     
    15311600
    15321601    Example:
    1533 
    15341602
    15351603# RAS export file created on Mon 15Aug2005 11:42
     
    15501618END HEADER:
    15511619
    1552 
    15531620Only the SURFACE LINE data of the following form will be utilised
    1554 
    15551621  CROSS-SECTION:
    15561622    STREAM ID:Southern-Wungong
     
    15871653    root = basename_in
    15881654
    1589     #Get ASCII file
    1590     infile = open(root + '.sdf', 'r')  #Open SDF file for read
     1655    # Get ASCII file
     1656    infile = open(root + '.sdf', 'r')
    15911657
    15921658    if verbose: print 'Reading HECRAS data from %s' % (root + '.sdf')
     
    15971663    if verbose: print 'Converting to pts format'
    15981664
     1665    # Scan through the header, picking up stuff we need.
    15991666    i = 0
    16001667    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
     
    16421709    i += 1
    16431710
    1644 
    1645     #Now read all points
     1711    # Now read all points
    16461712    points = []
    16471713    elevation = []
     
    16511717            elevation.append(entry[2])
    16521718
    1653 
    16541719    msg = 'Actual #number_of_cross_sections == %d, Reported as %d'\
    16551720          %(j+1, number_of_cross_sections)
    16561721    assert j+1 == number_of_cross_sections, msg
    16571722
    1658     #Get output file
     1723    # Get output file, write PTS data
    16591724    if basename_out is None:
    16601725        ptsname = root + '.pts'
     
    16671732    geo.export_points_file(ptsname)
    16681733
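A usage sketch for hecras_cross_sections2pts(); the stem is hypothetical:

    # Reads stem.sdf (RAS export) and writes the SURFACE LINE data to stem.pts.
    hecras_cross_sections2pts('stem', verbose=True)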
    1669 def export_grid(basename_in, extra_name_out = None,
    1670                 quantities = None, # defaults to elevation
    1671                 timestep = None,
    1672                 reduction = None,
    1673                 cellsize = 10,
    1674                 number_of_decimal_places = None,
    1675                 NODATA_value = -9999,
    1676                 easting_min = None,
    1677                 easting_max = None,
    1678                 northing_min = None,
    1679                 northing_max = None,
    1680                 verbose = False,
    1681                 origin = None,
    1682                 datum = 'WGS84',
    1683                 format = 'ers'):
    1684     """
    1685    
    1686     Wrapper for sww2dem. - see sww2dem to find out what most of the
    1687     parameters do.
     1734
     1735##
     1736# @brief Generate grid files (one per input SWW file); wrapper for sww2dem.
     1737# @param basename_in Stem of input filename(s).
     1738# @param extra_name_out Extra string appended to output filenames.
     1739# @param quantities Quantity name, or list of names, to export (def: elevation).
     1740# @param timestep If given, the timestep to export.
     1741# @param reduction Function to reduce a quantity over all timesteps (eg, max).
     1742# @param cellsize Grid cell size in the output grids.
     1743# @param number_of_decimal_places Number of decimal places written to output.
     1744# @param NODATA_value The NODATA value (default -9999).
     1745# @param easting_min Minimum easting of the bounding box.
     1746# @param easting_max Maximum easting of the bounding box.
     1747# @param northing_min Minimum northing of the bounding box.
     1748# @param northing_max Maximum northing of the bounding box.
     1749# @param verbose True if this function is to be verbose.
     1750# @param origin ??
     1751# @param datum The datum to use (default WGS84).
     1752# @param format Output format ('asc' or 'ers', default 'ers').
     1753# @return The names of the files produced.
     1754def export_grid(basename_in, extra_name_out=None,
     1755                quantities=None, # defaults to elevation
     1756                timestep=None,
     1757                reduction=None,
     1758                cellsize=10,
     1759                number_of_decimal_places=None,
     1760                NODATA_value=-9999,
     1761                easting_min=None,
     1762                easting_max=None,
     1763                northing_min=None,
     1764                northing_max=None,
     1765                verbose=False,
     1766                origin=None,
     1767                datum='WGS84',
     1768                format='ers'):
     1769    """Wrapper for sww2dem.
     1770    See sww2dem to find out what most of the parameters do.
    16881771
    16891772    Quantities is a list of quantities.  Each quantity will be
     
    16951778    This function returns the names of the files produced.
    16961779
    1697     It will also produce as many output files as there are input sww files. 
    1698     """
    1699    
     1780    It will also produce as many output files as there are input sww files.
     1781    """
     1782
    17001783    if quantities is None:
    17011784        quantities = ['elevation']
    1702        
     1785
    17031786    if type(quantities) is str:
    17041787        quantities = [quantities]
     
    17061789    # How many sww files are there?
    17071790    dir, base = os.path.split(basename_in)
    1708     #print "basename_in",basename_in
    1709     #print "base",base
    1710 
    1711     iterate_over = get_all_swwfiles(dir,base,verbose)
    1712    
     1791
     1792    iterate_over = get_all_swwfiles(dir, base, verbose)
     1793
    17131794    if dir == "":
    17141795        dir = "." # Unix compatibility
    1715    
     1796
    17161797    files_out = []
    1717     #print 'sww_file',iterate_over
    17181798    for sww_file in iterate_over:
    17191799        for quantity in quantities:
     
    17211801                basename_out = sww_file + '_' + quantity
    17221802            else:
    1723                 basename_out = sww_file + '_' + quantity + '_' \
    1724                                + extra_name_out
    1725 #            print "basename_out", basename_out
    1726        
     1803                basename_out = sww_file + '_' + quantity + '_' + extra_name_out
     1804
    17271805            file_out = sww2dem(dir+sep+sww_file, dir+sep+basename_out,
    1728                                quantity, 
     1806                               quantity,
    17291807                               timestep,
    17301808                               reduction,
     
    17411819                               format)
    17421820            files_out.append(file_out)
    1743     #print "basenames_out after",basenames_out
    17441821    return files_out
    17451822
    17461823
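A usage sketch for export_grid(); names are hypothetical. Per the docstring it returns the names of the files produced, one per input SWW file per quantity:

    # Export maximum stage and elevation as ASC grids at 25 m resolution.
    files = export_grid('my_scenario',
                        quantities=['stage', 'elevation'],
                        reduction=max,
                        cellsize=25,
                        format='asc')
    print files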
     1824##
     1825# @brief Generate time series plots/reports; wrapper for sww2timeseries.
     1826# @param production_dirs Dictionary mapping label_id to production directory.
     1827# @param output_dir Directory containing the input SWW files.
     1828# @param scenario_name Stem of the SWW filenames.
     1829# @param gauges_dir_name Name of the gauges file.
     1830# @param plot_quantity Quantity (or quantities) to plot.
     1831# @param generate_fig True if figures are to be generated.
     1832# @param reportname Stem of the report filename, if a report is required.
     1833# @param surface True if a surface plot is required.
     1834# @param time_min Beginning of the time range to plot.
     1835# @param time_max End of the time range to plot.
     1836# @param title_on True if titles are to be shown on plots.
     1837# @param verbose True if this function is to be verbose.
     1838# @param nodes Number of parallel nodes (None if run sequentially).
    17471839def get_timeseries(production_dirs, output_dir, scenario_name, gauges_dir_name,
    1748                    plot_quantity, generate_fig = False,
    1749                    reportname = None, surface = False, time_min = None,
    1750                    time_max = None, title_on = False, verbose = True,
     1840                   plot_quantity, generate_fig=False,
     1841                   reportname=None, surface=False, time_min=None,
     1842                   time_max=None, title_on=False, verbose=True,
    17511843                   nodes=None):
    17521844    """
     
    17551847    Warning: this function has no tests.
    17561848    """
     1849
    17571850    if reportname is None:
    17581851        report = False
    17591852    else:
    17601853        report = True
    1761        
     1854
    17621855    if nodes is None:
    17631856        is_parallel = False
    17641857    else:
    17651858        is_parallel = True
    1766        
     1859
    17671860    # Generate figures
    17681861    swwfiles = {}
    1769    
    1770     if is_parallel is True:   
     1862    if is_parallel is True:
    17711863        for i in range(nodes):
    1772             print 'Sending node %d of %d' %(i,nodes)
     1864            print 'Sending node %d of %d' % (i, nodes)
    17731865            swwfiles = {}
    17741866            if reportname is not None:
    1775                 reportname = report_name + '_%s' %(i)
     1867                reportname = reportname + '_%s' % i
    17761868            for label_id in production_dirs.keys():
    17771869                if label_id == 'boundaries':
     
    17791871                else:
    17801872                    file_loc = output_dir + label_id + sep
    1781                     sww_extra = '_P%s_%s' %(i,nodes)
     1873                    sww_extra = '_P%s_%s' % (i, nodes)
    17821874                    swwfile = file_loc + scenario_name + sww_extra + '.sww'
    1783                     print 'swwfile',swwfile
     1875                    print 'swwfile', swwfile
    17841876                    swwfiles[swwfile] = label_id
    17851877
     
    17871879                                              gauges_dir_name,
    17881880                                              production_dirs,
    1789                                               report = report,
    1790                                               reportname = reportname,
    1791                                               plot_quantity = plot_quantity,
    1792                                               generate_fig = generate_fig,
    1793                                               surface = surface,
    1794                                               time_min = time_min,
    1795                                               time_max = time_max,
    1796                                               title_on = title_on,
    1797                                               verbose = verbose)
    1798     else:   
    1799         for label_id in production_dirs.keys():       
     1881                                              report=report,
     1882                                              reportname=reportname,
     1883                                              plot_quantity=plot_quantity,
     1884                                              generate_fig=generate_fig,
     1885                                              surface=surface,
     1886                                              time_min=time_min,
     1887                                              time_max=time_max,
     1888                                              title_on=title_on,
     1889                                              verbose=verbose)
     1890    else:
     1891        for label_id in production_dirs.keys():
    18001892            if label_id == 'boundaries':
    18011893                print 'boundaries'
     
    18071899                swwfile = file_loc + scenario_name + '.sww'
    18081900            swwfiles[swwfile] = label_id
    1809        
     1901
    18101902        texname, elev_output = sww2timeseries(swwfiles,
    18111903                                              gauges_dir_name,
    18121904                                              production_dirs,
    1813                                               report = report,
    1814                                               reportname = reportname,
    1815                                               plot_quantity = plot_quantity,
    1816                                               generate_fig = generate_fig,
    1817                                               surface = surface,
    1818                                               time_min = time_min,
    1819                                               time_max = time_max,
    1820                                               title_on = title_on,
    1821                                               verbose = verbose)
    1822                                          
    1823 
    1824    
    1825 def sww2dem(basename_in, basename_out = None,
    1826             quantity = None, # defaults to elevation
    1827             timestep = None,
    1828             reduction = None,
    1829             cellsize = 10,
    1830             number_of_decimal_places = None,
    1831             NODATA_value = -9999,
    1832             easting_min = None,
    1833             easting_max = None,
    1834             northing_min = None,
    1835             northing_max = None,
    1836             verbose = False,
    1837             origin = None,
    1838             datum = 'WGS84',
    1839             format = 'ers'):
    1840 
     1905                                              report=report,
     1906                                              reportname=reportname,
     1907                                              plot_quantity=plot_quantity,
     1908                                              generate_fig=generate_fig,
     1909                                              surface=surface,
     1910                                              time_min=time_min,
     1911                                              time_max=time_max,
     1912                                              title_on=title_on,
     1913                                              verbose=verbose)
     1914
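A usage sketch for get_timeseries() in the sequential case (all names hypothetical):

    # One entry per production directory; 'boundaries' is treated specially.
    production_dirs = {'trial_0': 'first run', 'boundaries': 'boundary data'}
    get_timeseries(production_dirs, 'outputs' + sep, 'my_scenario', 'gauges.csv',
                   plot_quantity=['stage'], generate_fig=False, verbose=True)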
     1915
     1916##
     1917# @brief Convert SWW file to DEM file (.asc or .ers).
     1918# @param basename_in Stem of input filename.
     1919# @param basename_out Stem of output filename.
     1920# @param quantity Name or expression of the quantity to export (def: elevation).
     1921# @param timestep If given, the timestep to export.
     1922# @param reduction Function to reduce the quantity over all timesteps (def: max).
     1923# @param cellsize Grid cell size in the output grid.
     1924# @param number_of_decimal_places Number of decimal places written to output.
     1925# @param NODATA_value The NODATA value (default -9999).
     1926# @param easting_min Minimum easting of the bounding box.
     1927# @param easting_max Maximum easting of the bounding box.
     1928# @param northing_min Minimum northing of the bounding box.
     1929# @param northing_max Maximum northing of the bounding box.
     1930# @param verbose True if this function is to be verbose.
     1931# @param origin ??
     1932# @param datum The datum to use (default WGS84).
     1933# @param format Output format ('asc' or 'ers', default 'ers').
     1934# @return basename_out, the stem of the output file.
     1935def sww2dem(basename_in, basename_out=None,
     1936            quantity=None, # defaults to elevation
     1937            timestep=None,
     1938            reduction=None,
     1939            cellsize=10,
     1940            number_of_decimal_places=None,
     1941            NODATA_value=-9999,
     1942            easting_min=None,
     1943            easting_max=None,
     1944            northing_min=None,
     1945            northing_max=None,
     1946            verbose=False,
     1947            origin=None,
     1948            datum='WGS84',
     1949            format='ers'):
    18411950    """Read SWW file and convert to Digitial Elevation model format
    18421951    (.asc or .ers)
    18431952
    18441953    Example (ASC):
    1845 
    18461954    ncols         3121
    18471955    nrows         1800
     
    18541962    The number of decimal places written can be specified by the user
    18551963    (number_of_decimal_places) to save on disk space requirements.
    1856    
     1964
    18571965    Also write accompanying file with same basename_in but extension .prj
    18581966    used to fix the UTM zone, datum, false northings and eastings.
     
    18851993
    18861994    import sys
    1887     from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, \
    1888          sometrue
    1889     from Numeric import array2string
     1995    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape
     1996    from Numeric import array2string, sometrue
    18901997
    18911998    from anuga.utilities.polygon import inside_polygon, outside_polygon, \
     
    18972004    assert format.lower() in ['asc', 'ers'], msg
    18982005
    1899 
    19002006    false_easting = 500000
    19012007    false_northing = 10000000
     
    19032009    if quantity is None:
    19042010        quantity = 'elevation'
    1905        
     2011
    19062012    if reduction is None:
    19072013        reduction = max
    19082014
    19092015    if basename_out is None:
    1910         basename_out = basename_in + '_%s' %quantity
     2016        basename_out = basename_in + '_%s' % quantity
    19112017
    19122018    if quantity_formula.has_key(quantity):
     
    19152021    if number_of_decimal_places is None:
    19162022        number_of_decimal_places = 3
    1917        
     2023
    19182024    swwfile = basename_in + '.sww'
    19192025    demfile = basename_out + '.' + format
    19202026    # Note the use of a .ers extension is optional (write_ermapper_grid will
    19212027    # deal with either option
    1922    
    1923     #if verbose: bye= nsuadsfd[0] # uncomment to check catching verbose errors
    1924    
     2028
    19252029    # Read sww file
    1926     if verbose: 
    1927         print 'Reading from %s' %swwfile
    1928         print 'Output directory is %s' %basename_out
    1929    
     2030    if verbose:
     2031        print 'Reading from %s' % swwfile
     2032        print 'Output directory is %s' % basename_out
     2033
    19302034    from Scientific.IO.NetCDF import NetCDFFile
    19312035    fid = NetCDFFile(swwfile)
     
    19422046    number_of_timesteps = fid.dimensions['number_of_timesteps']
    19432047    number_of_points = fid.dimensions['number_of_points']
    1944    
     2048
    19452049    if origin is None:
    1946 
    19472050        # Get geo_reference
    19482051        # sww files don't have to have a geo_ref
     
    19592062        xllcorner = origin[1]
    19602063        yllcorner = origin[2]
    1961 
    1962 
    19632064
    19642065    # FIXME: Refactor using code from Interpolation_function.statistics
     
    19962097            q = fid.variables[name][:].flat
    19972098            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
    1998            
     2099
    19992100    # Get quantity and reduce if applicable
    20002101    if verbose: print 'Processing quantity %s' %quantity
     
    20022103    # Turn NetCDF objects into Numeric arrays
    20032104    try:
    2004         q = fid.variables[quantity][:]
    2005        
    2006        
     2105        q = fid.variables[quantity][:]
    20072106    except:
    20082107        quantity_dict = {}
    20092108        for name in fid.variables.keys():
    2010             quantity_dict[name] = fid.variables[name][:] 
    2011         #Convert quantity expression to quantities found in sww file   
     2109            quantity_dict[name] = fid.variables[name][:]
     2110        #Convert quantity expression to quantities found in sww file
    20122111        q = apply_expression_to_dictionary(quantity, quantity_dict)
    2013     #print "q.shape",q.shape
     2112
    20142113    if len(q.shape) == 2:
    2015         #q has a time component and needs to be reduced along
    2016         #the temporal dimension
     2114        #q has a time component, must be reduced along the temporal dimension
    20172115        if verbose: print 'Reducing quantity %s' %quantity
    2018         q_reduced = zeros( number_of_points, Float )
    2019        
     2116        q_reduced = zeros(number_of_points, Float)
     2117
    20202118        if timestep is not None:
    20212119            for k in range(number_of_points):
    2022                 q_reduced[k] = q[timestep,k] 
     2120                q_reduced[k] = q[timestep,k]
    20232121        else:
    20242122            for k in range(number_of_points):
     
    20322130
    20332131    if verbose:
    2034         print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
    2035 
     2132        print 'Processed values for %s are in [%f, %f]' % \
     2133              (quantity, min(q), max(q))
    20362134
    20372135    #Create grid and update xll/yll corner and x,y
    2038 
    20392136    #Relative extent
    20402137    if easting_min is None:
     
    20572154    else:
    20582155        ymax = northing_max - yllcorner
    2059    
    20602156
    20612157    msg = 'xmax must be greater than or equal to xmin.\n'
     
    20652161    msg = 'ymax must be greater than or equal to ymin.\n'
    20662162    msg += 'I got ymin = %f, ymax = %f' %(ymin, ymax)
    2067     assert ymax >= ymin, msg   
    2068    
     2163    assert ymax >= ymin, msg
     2164
    20692165    if verbose: print 'Creating grid'
    2070     ncols = int((xmax-xmin)/cellsize)+1
    2071     nrows = int((ymax-ymin)/cellsize)+1
    2072 
     2166    ncols = int((xmax-xmin)/cellsize) + 1
     2167    nrows = int((ymax-ymin)/cellsize) + 1
    20732168
    20742169    #New absolute reference and coordinates
    2075     newxllcorner = xmin+xllcorner
    2076     newyllcorner = ymin+yllcorner
    2077 
    2078     x = x+xllcorner-newxllcorner
    2079     y = y+yllcorner-newyllcorner
    2080    
    2081     vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
     2170    newxllcorner = xmin + xllcorner
     2171    newyllcorner = ymin + yllcorner
     2172
     2173    x = x + xllcorner - newxllcorner
     2174    y = y + yllcorner - newyllcorner
     2175
     2176    vertex_points = concatenate((x[:,NewAxis], y[:,NewAxis]), axis=1)
    20822177    assert len(vertex_points.shape) == 2
    20832178
    2084     grid_points = zeros ( (ncols*nrows, 2), Float )
    2085 
     2179    grid_points = zeros((ncols*nrows, 2), Float)
    20862180
    20872181    for i in xrange(nrows):
    20882182        if format.lower() == 'asc':
    2089             yg = i*cellsize
     2183            yg = i * cellsize
    20902184        else:
    20912185        #this will flip the order of the y values for ers
    2092             yg = (nrows-i)*cellsize
     2186            yg = (nrows-i) * cellsize
    20932187
    20942188        for j in xrange(ncols):
    2095             xg = j*cellsize
     2189            xg = j * cellsize
    20962190            k = i*ncols + j
    20972191
    2098             grid_points[k,0] = xg
    2099             grid_points[k,1] = yg
     2192            grid_points[k, 0] = xg
     2193            grid_points[k, 1] = yg
    21002194
    21012195    #Interpolate
     
    21122206    grid_values = interp.interpolate(q, grid_points).flat
    21132207
    2114 
    21152208    if verbose:
    21162209        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
     
    21242217        grid_values[i] = NODATA_value
    21252218
    2126 
    2127 
    2128 
    21292219    if format.lower() == 'ers':
    21302220        # setup ERS header information
    2131         grid_values = reshape(grid_values,(nrows, ncols))
     2221        grid_values = reshape(grid_values, (nrows, ncols))
    21322222        header = {}
    21332223        header['datum'] = '"' + datum + '"'
     
    21482238        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
    21492239
    2150 
    21512240        #Write
    21522241        if verbose: print 'Writing %s' %demfile
     2242
    21532243        import ermapper_grids
     2244
    21542245        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
    21552246
     
    21572248    else:
    21582249        #Write to Ascii format
    2159 
    21602250        #Write prj file
    21612251        prjfile = basename_out + '.prj'
     
    21742264        prjid.close()
    21752265
    2176 
    2177 
    21782266        if verbose: print 'Writing %s' %demfile
    21792267
     
    21872275        ascid.write('NODATA_value  %d\n' %NODATA_value)
    21882276
    2189 
    21902277        #Get bounding polygon from mesh
    21912278        #P = interp.mesh.get_boundary_polygon()
    21922279        #inside_indices = inside_polygon(grid_points, P)
    2193 
    21942280        for i in range(nrows):
    2195             if verbose and i%((nrows+10)/10)==0:
     2281            if verbose and i % ((nrows+10)/10) == 0:
    21962282                print 'Doing row %d of %d' %(i, nrows)
    21972283
     
    22002286            slice = grid_values[base_index:base_index+ncols]
    22012287            #s = array2string(slice, max_line_width=sys.maxint)
    2202             s = array2string(slice, max_line_width=sys.maxint, precision=number_of_decimal_places)
     2288            s = array2string(slice, max_line_width=sys.maxint,
     2289                             precision=number_of_decimal_places)
    22032290            ascid.write(s[1:-1] + '\n')
    2204 
    2205             #print
    2206             #for j in range(ncols):
    2207             #    index = base_index+j#
    2208             #    print grid_values[index],
    2209             #    ascid.write('%f ' %grid_values[index])
    2210             #ascid.write('\n')
    22112291
    22122292        #Close
    22132293        ascid.close()
    22142294        fid.close()
     2295
    22152296        return basename_out
    22162297
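A usage sketch for sww2dem() (hypothetical stem). reduction may be any function that reduces a quantity's time series to a single value, eg the builtin max:

    # Reads my_scenario.sww, writes my_scenario_stage.ers with the
    # maximum stage over all timesteps.
    sww2dem('my_scenario',
            quantity='stage',
            reduction=max,
            cellsize=10,
            number_of_decimal_places=3,
            format='ers')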
    2217 
    2218 #Backwards compatibility
     2298################################################################################
     2299# Obsolete functions - maintained for backwards compatibility
     2300################################################################################
     2301
     2302##
     2303# @brief Convert SWW file to ASC format (wrapper for sww2dem).
     2304# @param basename_in Stem of input filename.
     2305# @param basename_out Stem of output filename.
     2306# @param quantity Name of the quantity to export.
     2307# @param timestep If given, the timestep to export.
     2308# @param reduction Function to reduce the quantity over all timesteps.
     2309# @param cellsize Grid cell size in the output grid.
     2310# @param verbose True if this function is to be verbose.
     2311# @param origin ??
     2312# @note OBSOLETE - use sww2dem() instead.
    22192313def sww2asc(basename_in, basename_out=None,
    22202314            quantity=None,
     
    22372331        format='asc')
    22382332
    2239 def sww2ers(basename_in, basename_out = None,
    2240             quantity = None,
    2241             timestep = None,
    2242             reduction = None,
    2243             cellsize = 10,
    2244             verbose = False,
    2245             origin = None,
    2246             datum = 'WGS84'):
     2333
     2334##
     2335# @brief Convert SWW file to ERS format (wrapper for sww2dem).
     2336# @param basename_in Stem of input filename.
     2337# @param basename_out Stem of output filename.
     2338# @param quantity Name of the quantity to export.
     2339# @param timestep If given, the timestep to export.
     2340# @param reduction Function to reduce the quantity over all timesteps.
     2341# @param cellsize Grid cell size in the output grid.
     2342# @param verbose True if this function is to be verbose.
     2343# @param origin ??
     2344# @param datum The datum to use (default WGS84).
     2345# @note OBSOLETE - use sww2dem() instead.
     2346def sww2ers(basename_in, basename_out=None,
     2347            quantity=None,
     2348            timestep=None,
     2349            reduction=None,
     2350            cellsize=10, number_of_decimal_places=None,
     2351            verbose=False,
     2352            origin=None,
     2353            datum='WGS84'):
    22472354    print 'sww2ers will soon be obsoleted - please use sww2dem'
    22482355    sww2dem(basename_in,
    2249             basename_out = basename_out,
    2250             quantity = quantity,
    2251             timestep = timestep,
    2252             reduction = reduction,
    2253             cellsize = cellsize,
    2254             number_of_decimal_places = number_of_decimal_places,
    2255             verbose = verbose,
    2256             origin = origin,
    2257             datum = datum,
    2258             format = 'ers')
    2259 ################################# END COMPATIBILITY ##############
    2260 
    2261 
    2262 
     2356            basename_out=basename_out,
     2357            quantity=quantity,
     2358            timestep=timestep,
     2359            reduction=reduction,
     2360            cellsize=cellsize,
     2361            number_of_decimal_places=number_of_decimal_places,
     2362            verbose=verbose,
     2363            origin=origin,
     2364            datum=datum,
     2365            format='ers')
     2366
     2367################################################################################
     2368# End of obsolete functions
     2369################################################################################
     2370
     2371
     2372##
     2373# @brief Convert SWW file to PTS file (at selected points).
     2374# @param basename_in Stem name of input SWW file.
     2375# @param basename_out Stem name of output file.
     2376# @param data_points If given, points where quantity is to be computed.
     2377# @param quantity Name (or expression) of existing quantity(s) (def: elevation).
     2378# @param timestep If given, output quantity at that timestep.
     2379# @param reduction If given, function to reduce quantity over all timesteps.
     2380# @param NODATA_value The NODATA value (default -9999).
     2381# @param verbose True if this function is to be verbose.
     2382# @param origin ??
    22632383def sww2pts(basename_in, basename_out=None,
    22642384            data_points=None,
     
    22692389            verbose=False,
    22702390            origin=None):
    2271             #datum = 'WGS84')
    2272 
    2273 
    22742391    """Read SWW file and convert to interpolated values at selected points
    22752392
    2276     The parameter quantity' must be the name of an existing quantity or
    2277     an expression involving existing quantities. The default is
    2278     'elevation'.
    2279 
    2280     if timestep (an index) is given, output quantity at that timestep
     2393    The parameter 'quantity' must be the name of an existing quantity or
     2394    an expression involving existing quantities. The default is 'elevation'.
     2395
     2396    if timestep (an index) is given, output quantity at that timestep.
    22812397
    22822398    if reduction is given use that to reduce quantity over all timesteps.
    22832399
    2284     data_points (Nx2 array) give locations of points where quantity is to be computed.
    2285    
     2400    data_points (Nx2 array) give locations of points where quantity is to
     2401    be computed.
    22862402    """
    22872403
    22882404    import sys
    2289     from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
    2290     from Numeric import array2string
    2291 
    2292     from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
    2293     from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
    2294 
     2405    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape
     2406    from Numeric import array2string, sometrue
     2407    from anuga.utilities.polygon import inside_polygon, outside_polygon, \
     2408            separate_points_by_polygon
     2409    from anuga.abstract_2d_finite_volumes.util import \
     2410             apply_expression_to_dictionary
    22952411    from anuga.geospatial_data.geospatial_data import Geospatial_data
    22962412
     
    23022418
    23032419    if basename_out is None:
    2304         basename_out = basename_in + '_%s' %quantity
     2420        basename_out = basename_in + '_%s' % quantity
    23052421
    23062422    swwfile = basename_in + '.sww'
     
    23082424
    23092425    # Read sww file
    2310     if verbose: print 'Reading from %s' %swwfile
     2426    if verbose: print 'Reading from %s' % swwfile
    23112427    from Scientific.IO.NetCDF import NetCDFFile
    23122428    fid = NetCDFFile(swwfile)
     
    23202436    number_of_points = fid.dimensions['number_of_points']
    23212437    if origin is None:
    2322 
    23232438        # Get geo_reference
    23242439        # sww files don't have to have a geo_ref
     
    23262441            geo_reference = Geo_reference(NetCDFObject=fid)
    23272442        except AttributeError, e:
    2328             geo_reference = Geo_reference() #Default georef object
     2443            geo_reference = Geo_reference() # Default georef object
    23292444
    23302445        xllcorner = geo_reference.get_xllcorner()
     
    23352450        xllcorner = origin[1]
    23362451        yllcorner = origin[2]
    2337 
    2338 
    23392452
    23402453    # FIXME: Refactor using code from file_function.statistics
     
    23462459        print '------------------------------------------------'
    23472460        print 'Statistics of SWW file:'
    2348         print '  Name: %s' %swwfile
     2461        print '  Name: %s' % swwfile
    23492462        print '  Reference:'
    2350         print '    Lower left corner: [%f, %f]'\
    2351               %(xllcorner, yllcorner)
    2352         print '    Start time: %f' %fid.starttime[0]
     2463        print '    Lower left corner: [%f, %f]' % (xllcorner, yllcorner)
     2464        print '    Start time: %f' % fid.starttime[0]
    23532465        print '  Extent:'
    2354         print '    x [m] in [%f, %f], len(x) == %d'\
    2355               %(min(x.flat), max(x.flat), len(x.flat))
    2356         print '    y [m] in [%f, %f], len(y) == %d'\
    2357               %(min(y.flat), max(y.flat), len(y.flat))
    2358         print '    t [s] in [%f, %f], len(t) == %d'\
    2359               %(min(times), max(times), len(times))
     2466        print '    x [m] in [%f, %f], len(x) == %d' \
     2467              % (min(x.flat), max(x.flat), len(x.flat))
     2468        print '    y [m] in [%f, %f], len(y) == %d' \
     2469              % (min(y.flat), max(y.flat), len(y.flat))
     2470        print '    t [s] in [%f, %f], len(t) == %d' \
     2471              % (min(times), max(times), len(times))
    23602472        print '  Quantities [SI units]:'
    23612473        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
    23622474            q = fid.variables[name][:].flat
    2363             print '    %s in [%f, %f]' %(name, min(q), max(q))
    2364 
    2365 
     2475            print '    %s in [%f, %f]' % (name, min(q), max(q))
    23662476
    23672477    # Get quantity and reduce if applicable
    2368     if verbose: print 'Processing quantity %s' %quantity
     2478    if verbose: print 'Processing quantity %s' % quantity
    23692479
    23702480    # Turn NetCDF objects into Numeric arrays
     
    23732483        quantity_dict[name] = fid.variables[name][:]
    23742484
    2375 
    2376 
    2377     # Convert quantity expression to quantities found in sww file   
     2485    # Convert quantity expression to quantities found in sww file
    23782486    q = apply_expression_to_dictionary(quantity, quantity_dict)
    2379 
    2380 
    23812487
    23822488    if len(q.shape) == 2:
    23832489        # q has a time component and needs to be reduced along
    23842490        # the temporal dimension
    2385         if verbose: print 'Reducing quantity %s' %quantity
    2386         q_reduced = zeros( number_of_points, Float )
    2387 
     2491        if verbose: print 'Reducing quantity %s' % quantity
     2492
     2493        q_reduced = zeros(number_of_points, Float)
    23882494        for k in range(number_of_points):
    2389             q_reduced[k] = reduction( q[:,k] )
    2390 
     2495            q_reduced[k] = reduction(q[:,k])
    23912496        q = q_reduced
    23922497
     
    23952500    assert q.shape[0] == number_of_points
    23962501
    2397 
    23982502    if verbose:
    2399         print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
    2400 
     2503        print 'Processed values for %s are in [%f, %f]' \
     2504              % (quantity, min(q), max(q))
    24012505
    24022506    # Create grid and update xll/yll corner and x,y
    2403     vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
     2507    vertex_points = concatenate((x[:, NewAxis], y[:, NewAxis]), axis=1)
    24042508    assert len(vertex_points.shape) == 2
    24052509
    24062510    # Interpolate
    24072511    from anuga.fit_interpolate.interpolate import Interpolate
    2408     interp = Interpolate(vertex_points, volumes, verbose = verbose)
     2512    interp = Interpolate(vertex_points, volumes, verbose=verbose)
    24092513
    24102514    # Interpolate using quantity values
     
    24122516    interpolated_values = interp.interpolate(q, data_points).flat
    24132517
    2414 
    24152518    if verbose:
    2416         print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
    2417                                                       max(interpolated_values))
    2418 
    2419     # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
     2519        print 'Interpolated values are in [%f, %f]' % (min(interpolated_values),
     2520                                                       max(interpolated_values))
     2521
     2522    # Assign NODATA_value to all points outside bounding polygon
     2523    # (from interpolation mesh)
    24202524    P = interp.mesh.get_boundary_polygon()
    24212525    outside_indices = outside_polygon(data_points, P, closed=True)
     
    24242528        interpolated_values[i] = NODATA_value
    24252529
    2426 
    2427     # Store results   
    2428     G = Geospatial_data(data_points=data_points,
    2429                         attributes=interpolated_values)
     2530    # Store results
     2531    G = Geospatial_data(data_points=data_points, attributes=interpolated_values)
    24302532
    24312533    G.export_points_file(ptsfile, absolute=True)
     
    24342536
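A usage sketch for sww2pts() (hypothetical points, given in the SWW file's coordinate system); a quantity expression is used to derive depth:

    from Numeric import array

    points = array([[358500.0, 6183000.0],
                    [359000.0, 6184200.0]])
    sww2pts('my_scenario',
            data_points=points,
            quantity='stage - elevation',  # depth, via an expression
            reduction=max)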
    24352537
    2436 def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
    2437                                   use_cache = False,
    2438                                   verbose = False):
    2439     """Read Digitial Elevation model from the following ASCII format (.asc)
     2538##
     2539# @brief Convert ASC file to DEM file.
     2540# @param basename_in Stem of input filename.
     2541# @param basename_out Stem of output filename.
     2542# @param use_cache True if caching is to be used.
     2543# @param verbose True if this function is to be verbose.
     2544# @return
     2545# @note A PRJ file with same stem basename must exist and is used to fix the
     2546#       UTM zone, datum, false northings and eastings.
     2547def convert_dem_from_ascii2netcdf(basename_in, basename_out=None,
     2548                                  use_cache=False,
     2549                                  verbose=False):
     2550    """Read Digital Elevation model from the following ASCII format (.asc)
    24402551
    24412552    Example:
    2442 
    24432553    ncols         3121
    24442554    nrows         1800
     
    24512561    Convert basename_in + '.asc' to NetCDF format (.dem)
    24522562    mimicking the ASCII format closely.
    2453 
    24542563
    24552564    An accompanying file with same basename_in but extension .prj must exist
     
    24692578    """
    24702579
    2471 
    2472 
    24732580    kwargs = {'basename_out': basename_out, 'verbose': verbose}
    24742581
     
    24762583        from caching import cache
    24772584        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
    2478                        dependencies = [basename_in + '.asc',
    2479                                        basename_in + '.prj'],
    2480                        verbose = verbose)
     2585                       dependencies=[basename_in + '.asc',
     2586                                     basename_in + '.prj'],
     2587                       verbose=verbose)
    24812588
    24822589    else:
     
    24862593
    24872594
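A usage sketch for convert_dem_from_ascii2netcdf(); with use_cache True the result is cached against stem.asc and stem.prj, so an unchanged rerun is cheap:

    # Reads stem.asc and stem.prj, writes stem.dem (NetCDF).
    convert_dem_from_ascii2netcdf('stem', use_cache=True, verbose=True)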
    2488 
    2489 
    2490 
    2491 
     2595##
     2596# @brief Convert an ASC file to a DEM file.
     2597# @param basename_in Stem of input filename.
     2598# @param basename_out Stem of output filename.
     2599# @param verbose True if this function is to be verbose.
    24922600def _convert_dem_from_ascii2netcdf(basename_in, basename_out=None,
    24932601                                  verbose=False):
    2494     """Read Digitial Elevation model from the following ASCII format (.asc)
    2495 
    2496     Internal function. See public function convert_dem_from_ascii2netcdf for details.
     2602    """Read Digital Elevation model from the following ASCII format (.asc)
     2603
     2604    Internal function. See public function convert_dem_from_ascii2netcdf
     2605    for details.
    24972606    """
    24982607
     
    25012610    from Numeric import Float, array
    25022611
    2503     #root, ext = os.path.splitext(basename_in)
    25042612    root = basename_in
    25052613
    2506     ###########################################
    25072614    # Read Meta data
    2508     if verbose: print 'Reading METADATA from %s' %root + '.prj'
     2615    if verbose: print 'Reading METADATA from %s' % root + '.prj'
     2616
    25092617    metadatafile = open(root + '.prj')
    25102618    metalines = metadatafile.readlines()
     
    25432651    false_northing = float(L[1].strip())
    25442652
    2545     #print false_easting, false_northing, zone, datum
    2546 
    2547 
    2548     ###########################################
    25492653    #Read DEM data
    2550 
    25512654    datafile = open(basename_in + '.asc')
    25522655
    2553     if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
     2656    if verbose: print 'Reading DEM from %s' % basename_in + '.asc'
     2657
    25542658    lines = datafile.readlines()
    25552659    datafile.close()
     
    25612665
    25622666    # Do cellsize (line 4) before line 2 and 3
    2563     cellsize = float(lines[4].split()[1].strip())   
     2667    cellsize = float(lines[4].split()[1].strip())
    25642668
    25652669    # Checks suggested by Joaquim Luis
     
    25722676        xllcorner = float(xref[1].strip())
    25732677    else:
    2574         msg = 'Unknown keyword: %s' %xref[0].strip()
     2678        msg = 'Unknown keyword: %s' % xref[0].strip()
    25752679        raise Exception, msg
    25762680
     
    25812685        yllcorner = float(yref[1].strip())
    25822686    else:
    2583         msg = 'Unknown keyword: %s' %yref[0].strip()
     2687        msg = 'Unknown keyword: %s' % yref[0].strip()
    25842688        raise Exception, msg
    2585        
    25862689
    25872690    NODATA_value = int(lines[5].split()[1].strip())
    25882691
    25892692    assert len(lines) == nrows + 6
    2590 
    2591 
    2592     ##########################################
    2593 
    25942693
    25952694    if basename_out is None:
     
    25982697        netcdfname = basename_out + '.dem'
    25992698
    2600     if verbose: print 'Store to NetCDF file %s' %netcdfname
     2699    if verbose: print 'Store to NetCDF file %s' % netcdfname
     2700
    26012701    # NetCDF file definition
    26022702    fid = NetCDFFile(netcdfname, 'w')
     
    26042704    #Create new file
    26052705    fid.institution = 'Geoscience Australia'
    2606     fid.description = 'NetCDF DEM format for compact and portable storage ' +\
     2706    fid.description = 'NetCDF DEM format for compact and portable storage ' \
    26072707                      'of spatial point data'
    26082708
     
    26212721    fid.units = units
    26222722
    2623 
    26242723    # dimension definitions
    26252724    fid.createDimension('number_of_rows', nrows)
     
    26372736    for i, line in enumerate(lines[6:]):
    26382737        fields = line.split()
    2639         if verbose and i%((n+10)/10)==0:
    2640             print 'Processing row %d of %d' %(i, nrows)
    2641 
     2738        if verbose and i % ((n+10)/10) == 0:
     2739            print 'Processing row %d of %d' % (i, nrows)
    26422740        elevation[i, :] = array([float(x) for x in fields])
    26432741
     
    26452743
    26462744
    2647 
    2648 
    2649 
    2650 def ferret2sww(basename_in, basename_out = None,
    2651                verbose = False,
    2652                minlat = None, maxlat = None,
    2653                minlon = None, maxlon = None,
    2654                mint = None, maxt = None, mean_stage = 0,
    2655                origin = None, zscale = 1,
    2656                fail_on_NaN = True,
    2657                NaN_filler = 0,
    2658                elevation = None,
    2659                inverted_bathymetry = True
     2745##
     2746# @brief Convert 'ferret' file to SWW file.
     2747# @param basename_in Stem of input filename.
     2748# @param basename_out Stem of output filename.
     2749# @param verbose True if this function is to be verbose.
     2750# @param minlat Minimum latitude of the bounding box.
     2751# @param maxlat Maximum latitude of the bounding box.
     2752# @param minlon Minimum longitude of the bounding box.
     2753# @param maxlon Maximum longitude of the bounding box.
     2754# @param mint Minimum time of the time range.
     2755# @param maxt Maximum time of the time range.
     2756# @param mean_stage Mean stage added to the stage values (default 0).
     2757# @param origin ??
     2758# @param zscale Scale factor applied to the stage values (default 1).
     2759# @param fail_on_NaN True if this function is to fail on missing values.
     2760# @param NaN_filler Value used to replace missing values when not failing.
     2761# @param elevation If given, used in place of the elevation file data.
     2762# @param inverted_bathymetry True if the bathymetry is to be negated.
     2763def ferret2sww(basename_in, basename_out=None,
     2764               verbose=False,
     2765               minlat=None, maxlat=None,
     2766               minlon=None, maxlon=None,
     2767               mint=None, maxt=None, mean_stage=0,
     2768               origin=None, zscale=1,
     2769               fail_on_NaN=True,
     2770               NaN_filler=0,
     2771               elevation=None,
     2772               inverted_bathymetry=True
    26602773               ): #FIXME: Bathymetry should be obtained
    26612774                                  #from MOST somehow.
     
    27122825        if minlon is not None:
    27132826            assert maxlon > minlon
    2714        
    2715 
    2716 
    2717     #Get NetCDF data
    2718     if verbose: print 'Reading files %s_*.nc' %basename_in
    2719     #print "basename_in + '_ha.nc'",basename_in + '_ha.nc'
    2720     file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
    2721     file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
    2722     file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
    2723     file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
     2827
     2828    # Get NetCDF data
     2829    if verbose: print 'Reading files %s_*.nc' % basename_in
     2830
     2831    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') # Wave amplitude (cm)
     2832    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') # Velocity (x) (cm/s)
     2833    file_v = NetCDFFile(basename_in + '_va.nc', 'r') # Velocity (y) (cm/s)
     2834    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  # Elevation (z) (m)
    27242835
    27252836    if basename_out is None:
     
    27362847        if dimension[:4] == 'TIME':
    27372848            dim_h_time = dimension
    2738 
    2739 #    print 'long:', dim_h_longitude
    2740 #    print 'lats:', dim_h_latitude
    2741 #    print 'times:', dim_h_time
    27422849
    27432850    times = file_h.variables[dim_h_time]
     
    27642871        if dimension[:4] == 'TIME':
    27652872            dim_u_time = dimension
    2766            
     2873
    27672874    # get dimensions for file_v
    27682875    for dimension in file_v.dimensions.keys():
     
    27742881            dim_v_time = dimension
    27752882
    2776 
    27772883    # Precision used by MOST for lat/lon is 4 or 5 decimals
    27782884    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
     
    27902896    if mint is None:
    27912897        jmin = 0
    2792         mint = times[0]       
     2898        mint = times[0]
    27932899    else:
    27942900        jmin = searchsorted(times, mint)
    2795        
     2901
    27962902    if maxt is None:
    27972903        jmax = len(times)
     
    28002906        jmax = searchsorted(times, maxt)
    28012907
    2802     #print "latitudes[:]",latitudes[:]
    2803     #print "longitudes[:]",longitudes [:]
    28042908    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
    28052909                                                  longitudes[:],
     
    28122916    longitudes = longitudes[lmin:lmax]
    28132917
    2814     #print "latitudes[:]",latitudes[:]
    2815     #print "longitudes[:]",longitudes [:]
    2816 
    28172918    if verbose: print 'cropping'
     2919
    28182920    zname = 'ELEVATION'
    28192921
     
    28372939    #        'print hmmm'
    28382940
    2839 
    2840 
    28412941    #Get missing values
    28422942    nan_ha = file_h.variables['HA'].missing_value[0]
     
    28542954    if sometrue(missing):
    28552955        if fail_on_NaN:
    2856             msg = 'NetCDFFile %s contains missing values'\
    2857                   %(basename_in+'_ha.nc')
     2956            msg = 'NetCDFFile %s contains missing values' \
     2957                  % (basename_in + '_ha.nc')
    28582958            raise DataMissingValuesError, msg
    28592959        else:
     
    28632963    if sometrue(missing):
    28642964        if fail_on_NaN:
    2865             msg = 'NetCDFFile %s contains missing values'\
    2866                   %(basename_in+'_ua.nc')
     2965            msg = 'NetCDFFile %s contains missing values' \
     2966                  % (basename_in + '_ua.nc')
    28672967            raise DataMissingValuesError, msg
    28682968        else:
     
    28722972    if sometrue(missing):
    28732973        if fail_on_NaN:
    2874             msg = 'NetCDFFile %s contains missing values'\
    2875                   %(basename_in+'_va.nc')
     2974            msg = 'NetCDFFile %s contains missing values' \
     2975                  % (basename_in + '_va.nc')
    28762976            raise DataMissingValuesError, msg
    28772977        else:
    28782978            vspeed = vspeed*(missing==0) + missing*NaN_filler
    28792979
    2880 
    28812980    missing = (elevations == nan_e)
    28822981    if sometrue(missing):
    28832982        if fail_on_NaN:
    2884             msg = 'NetCDFFile %s contains missing values'\
    2885                   %(basename_in+'_e.nc')
     2983            msg = 'NetCDFFile %s contains missing values' \
     2984                  % (basename_in + '_e.nc')
    28862985            raise DataMissingValuesError, msg
    28872986        else:
    28882987            elevations = elevations*(missing==0) + missing*NaN_filler
    2889 
    2890     #######
    2891 
    2892 
    28932988
    28942989    number_of_times = times.shape[0]
     
    29042999        print 'Statistics:'
    29053000        print '  Extent (lat/lon):'
    2906         print '    lat in [%f, %f], len(lat) == %d'\
    2907               %(min(latitudes.flat), max(latitudes.flat),
    2908                 len(latitudes.flat))
    2909         print '    lon in [%f, %f], len(lon) == %d'\
    2910               %(min(longitudes.flat), max(longitudes.flat),
    2911                 len(longitudes.flat))
    2912         print '    t in [%f, %f], len(t) == %d'\
    2913               %(min(times.flat), max(times.flat), len(times.flat))
     3001        print '    lat in [%f, %f], len(lat) == %d' \
     3002              % (min(latitudes.flat), max(latitudes.flat), len(latitudes.flat))
     3003        print '    lon in [%f, %f], len(lon) == %d' \
     3004              % (min(longitudes.flat), max(longitudes.flat),
     3005                 len(longitudes.flat))
     3006        print '    t in [%f, %f], len(t) == %d' \
     3007              % (min(times.flat), max(times.flat), len(times.flat))
    29143008
    29153009        q = amplitudes.flat
    29163010        name = 'Amplitudes (ha) [cm]'
    2917         print '  %s in [%f, %f]' %(name, min(q), max(q))
     3011        print '  %s in [%f, %f]' % (name, min(q), max(q))
    29183012
    29193013        q = uspeed.flat
    29203014        name = 'Speeds (ua) [cm/s]'
    2921         print '  %s in [%f, %f]' %(name, min(q), max(q))
     3015        print '  %s in [%f, %f]' % (name, min(q), max(q))
    29223016
    29233017        q = vspeed.flat
    29243018        name = 'Speeds (va) [cm/s]'
    2925         print '  %s in [%f, %f]' %(name, min(q), max(q))
     3019        print '  %s in [%f, %f]' % (name, min(q), max(q))
    29263020
    29273021        q = elevations.flat
    29283022        name = 'Elevations (e) [m]'
    2929         print '  %s in [%f, %f]' %(name, min(q), max(q))
    2930 
     3023        print '  %s in [%f, %f]' % (name, min(q), max(q))
    29313024
    29323025    # print number_of_latitudes, number_of_longitudes
    2933     number_of_points = number_of_latitudes*number_of_longitudes
    2934     number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
    2935 
     3026    number_of_points = number_of_latitudes * number_of_longitudes
     3027    number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2
    29363028
    29373029    file_h.close()
     
    29403032    file_e.close()
    29413033
    2942 
    29433034    # NetCDF file definition
    29443035    outfile = NetCDFFile(swwname, 'w')
    29453036
    2946     description = 'Converted from Ferret files: %s, %s, %s, %s'\
    2947                   %(basename_in + '_ha.nc',
    2948                     basename_in + '_ua.nc',
    2949                     basename_in + '_va.nc',
    2950                     basename_in + '_e.nc')
    2951    
     3037    description = 'Converted from Ferret files: %s, %s, %s, %s' \
     3038                  % (basename_in + '_ha.nc',
     3039                     basename_in + '_ua.nc',
     3040                     basename_in + '_va.nc',
     3041                     basename_in + '_e.nc')
     3042
    29523043    # Create new file
    29533044    starttime = times[0]
    2954    
     3045
    29553046    sww = Write_sww()
    29563047    sww.store_header(outfile, times, number_of_volumes,
    29573048                     number_of_points, description=description,
    2958                      verbose=verbose,sww_precision=Float)
     3049                     verbose=verbose, sww_precision=Float)
    29593050
    29603051    # Store
     
    29633054    y = zeros(number_of_points, Float)  #Northing
    29643055
    2965 
    29663056    if verbose: print 'Making triangular grid'
    29673057
    29683058    # Check zone boundaries
    2969     refzone, _, _ = redfearn(latitudes[0],longitudes[0])
     3059    refzone, _, _ = redfearn(latitudes[0], longitudes[0])
    29703060
    29713061    vertices = {}
    29723062    i = 0
    2973     for k, lat in enumerate(latitudes):       #Y direction
    2974         for l, lon in enumerate(longitudes):  #X direction
    2975 
     3063    for k, lat in enumerate(latitudes):       # Y direction
     3064        for l, lon in enumerate(longitudes):  # X direction
    29763065            vertices[l,k] = i
    29773066
    29783067            zone, easting, northing = redfearn(lat,lon)
    29793068
    2980             msg = 'Zone boundary crossed at longitude =', lon
     3069            #msg = 'Zone boundary crossed at longitude =', lon
    29813070            #assert zone == refzone, msg
    29823071            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
     
    29873076    #Construct 2 triangles per 'rectangular' element
    29883077    volumes = []
    2989     for l in range(number_of_longitudes-1):    #X direction
    2990         for k in range(number_of_latitudes-1): #Y direction
     3078    for l in range(number_of_longitudes-1):    # X direction
     3079        for k in range(number_of_latitudes-1): # Y direction
    29913080            v1 = vertices[l,k+1]
    29923081            v2 = vertices[l,k]
     
    30003089
    30013090    if origin is None:
    3002         origin = Geo_reference(refzone,min(x),min(y))
     3091        origin = Geo_reference(refzone, min(x), min(y))
    30033092    geo_ref = write_NetCDF_georeference(origin, outfile)
    3004    
     3093
    30053094    if elevation is not None:
    30063095        z = elevation
    30073096    else:
    30083097        if inverted_bathymetry:
    3009             z = -1*elevations
     3098            z = -1 * elevations
    30103099        else:
    30113100            z = elevations
     
    30143103    #FIXME use the Write_sww instance(sww) to write this info
    30153104    from Numeric import resize
    3016     z = resize(z,outfile.variables['z'][:].shape)
     3105    z = resize(z, outfile.variables['z'][:].shape)
    30173106    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    30183107    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
     
    30213110    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
    30223111
    3023 
    3024 
    30253112    #Time stepping
    30263113    stage = outfile.variables['stage']
     
    30293116
    30303117    if verbose: print 'Converting quantities'
     3118
    30313119    n = len(times)
    30323120    for j in range(n):
    3033         if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
      3121        if verbose and j % ((n+10)/10) == 0: print '  Doing %d of %d' % (j, n)
     3122
    30343123        i = 0
    3035         for k in range(number_of_latitudes):      #Y direction
    3036             for l in range(number_of_longitudes): #X direction
    3037                 w = zscale*amplitudes[j,k,l]/100 + mean_stage
     3124        for k in range(number_of_latitudes):      # Y direction
     3125            for l in range(number_of_longitudes): # X direction
     3126                w = zscale * amplitudes[j,k,l] / 100 + mean_stage
    30383127                stage[j,i] = w
    30393128                h = w - z[i]
     
    30533142        print '  Name: %s' %swwname
    30543143        print '  Reference:'
    3055         print '    Lower left corner: [%f, %f]'\
    3056               %(geo_ref.get_xllcorner(), geo_ref.get_yllcorner())
     3144        print '    Lower left corner: [%f, %f]' \
     3145              % (geo_ref.get_xllcorner(), geo_ref.get_yllcorner())
    30573146        print ' Start time: %f' %starttime
    30583147        print '    Min time: %f' %mint
    30593148        print '    Max time: %f' %maxt
    30603149        print '  Extent:'
    3061         print '    x [m] in [%f, %f], len(x) == %d'\
    3062               %(min(x.flat), max(x.flat), len(x.flat))
    3063         print '    y [m] in [%f, %f], len(y) == %d'\
    3064               %(min(y.flat), max(y.flat), len(y.flat))
    3065         print '    t [s] in [%f, %f], len(t) == %d'\
    3066               %(min(times), max(times), len(times))
     3150        print '    x [m] in [%f, %f], len(x) == %d' \
     3151              % (min(x.flat), max(x.flat), len(x.flat))
     3152        print '    y [m] in [%f, %f], len(y) == %d' \
     3153              % (min(y.flat), max(y.flat), len(y.flat))
     3154        print '    t [s] in [%f, %f], len(t) == %d' \
     3155              % (min(times), max(times), len(times))
    30673156        print '  Quantities [SI units]:'
    30683157        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
    30693158            q = outfile.variables[name][:].flat
    3070             print '    %s in [%f, %f]' %(name, min(q), max(q))
    3071 
    3072 
     3159            print '    %s in [%f, %f]' % (name, min(q), max(q))
    30733160
    30743161    outfile.close()
    30753162
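
For orientation, a minimal usage sketch of ferret2sww() as touched by this hunk; the file stem 'test' and all keyword values are hypothetical, and only arguments visible in this changeset are used.

    # Hedged sketch: reads test_ha.nc, test_ua.nc, test_va.nc and test_e.nc
    # and writes the corresponding sww file, cropping the time axis.
    from anuga.shallow_water.data_manager import ferret2sww

    ferret2sww('test',
               mint=0.0, maxt=3600.0,          # time window (seconds)
               mean_stage=0.0, zscale=1,       # stage offset and scaling
               fail_on_NaN=False, NaN_filler=0,
               inverted_bathymetry=True,       # negate elevations, as above
               verbose=True)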
    30763163
    3077 
    3078 
    3079 
     3164##
     3165# @brief Convert time-series text file to TMS file.
     3166# @param filename
     3167# @param quantity_names
     3168# @param time_as_seconds
    30803169def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False):
    30813170    """Template for converting typical text files with time series to
    30823171    NetCDF tms file.
    30833172
    3084 
    30853173    The file format is assumed to be either two fields separated by a comma:
    30863174
     
    30933181
    30943182    or time (seconds), value0 value1 value2 ...
    3095    
     3183
    30963184      0.0, 1.328223 0 0
    30973185      0.1, 1.292912 0 0
     
    31073195    from anuga.utilities.numerical_tools import ensure_numeric
    31083196
    3109 
    3110 
    3111     fid = open(filename + '.txt')
     3197    file_text = filename + '.txt'
     3198    fid = open(file_text)
    31123199    line = fid.readline()
    31133200    fid.close()
    31143201
    31153202    fields = line.split(',')
    3116     msg = 'File %s must have the format date, value0 value1 value2 ...'
     3203    msg = "File %s must have the format 'datetime, value0 value1 value2 ...'" \
     3204          % file_text
    31173205    assert len(fields) == 2, msg
    31183206
     
    31213209            starttime = calendar.timegm(time.strptime(fields[0], time_format))
    31223210        except ValueError:
    3123             msg = 'First field in file %s must be' %filename
    3124             msg += ' date-time with format %s.\n' %time_format
    3125             msg += 'I got %s instead.' %fields[0]
     3211            msg = 'First field in file %s must be' % file_text
     3212            msg += ' date-time with format %s.\n' % time_format
     3213            msg += 'I got %s instead.' % fields[0]
    31263214            raise DataTimeError, msg
    31273215    else:
     
    31323220            raise DataTimeError, msg
    31333221
    3134 
    3135     #Split values
     3222    # Split values
    31363223    values = []
    31373224    for value in fields[1].split():
     
    31433230    assert len(q.shape) == 1, msg
    31443231
    3145 
    3146 
    3147     #Read times proper
     3232    # Read times proper
    31483233    from Numeric import zeros, Float, alltrue
    31493234    from anuga.config import time_format
    31503235    import time, calendar
    31513236
    3152     fid = open(filename + '.txt')
     3237    fid = open(file_text)
    31533238    lines = fid.readlines()
    31543239    fid.close()
     
    31573242    d = len(q)
    31583243
    3159     T = zeros(N, Float)       #Time
    3160     Q = zeros((N, d), Float)  #Values
     3244    T = zeros(N, Float)       # Time
     3245    Q = zeros((N, d), Float)  # Values
    31613246
    31623247    for i, line in enumerate(lines):
     
    31713256            Q[i, j] = float(value)
    31723257
    3173     msg = 'File %s must list time as a monotonuosly ' %filename
      3258    msg = 'File %s must list time as a monotonically ' % filename
    31743259    msg += 'increasing sequence'
    3175     assert alltrue( T[1:] - T[:-1] > 0 ), msg
     3260    assert alltrue(T[1:] - T[:-1] > 0), msg
    31763261
    31773262    #Create NetCDF file
     
    31803265    fid = NetCDFFile(filename + '.tms', 'w')
    31813266
    3182 
    31833267    fid.institution = 'Geoscience Australia'
    31843268    fid.description = 'Time series'
    3185 
    31863269
    31873270    #Reference point
     
    31943277    #fid.createDimension('number_of_vertices', 3)
    31953278
    3196 
    31973279    fid.createDimension('number_of_timesteps', len(T))
    31983280
     
    32053287            name = quantity_names[i]
    32063288        except:
    3207             name = 'Attribute%d'%i
     3289            name = 'Attribute%d' % i
    32083290
    32093291        fid.createVariable(name, Float, ('number_of_timesteps',))
     
    32133295
    32143296
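
A short, hedged sketch of timefile2netcdf() using the second (time-as-seconds) format quoted in the docstring; the stem 'gauge' and the quantity names are illustrative only.

    # Hedged sketch: the function appends '.txt' itself, so the stem
    # 'gauge' is passed; the output is written to 'gauge.tms'.
    from anuga.shallow_water.data_manager import timefile2netcdf

    fid = open('gauge.txt', 'w')
    fid.write('0.0, 1.328223 0 0\n0.1, 1.292912 0 0\n')
    fid.close()

    timefile2netcdf('gauge', quantity_names=['stage', 'xmom', 'ymom'],
                    time_as_seconds=True)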
     3297##
     3298# @brief Get the extents of a NetCDF data file.
     3299# @param file_name The path to the SWW file.
     3300# @return A list of x, y, z and stage limits (min, max).
    32153301def extent_sww(file_name):
    3216     """
    3217     Read in an sww file.
    3218 
    3219     Input;
     3302    """Read in an sww file.
     3303
     3304    Input:
    32203305    file_name - the sww file
    32213306
    3222     Output;
    3223     z - Vector of bed elevation
    3224     volumes - Array.  Each row has 3 values, representing
    3225     the vertices that define the volume
    3226     time - Vector of the times where there is stage information
    3227     stage - array with respect to time and vertices (x,y)
    3228     """
    3229 
     3307    Output:
     3308    A list: [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
     3309    """
    32303310
    32313311    from Scientific.IO.NetCDF import NetCDFFile
    32323312
    3233     #Check contents
    32343313    #Get NetCDF
    32353314    fid = NetCDFFile(file_name, 'r')
     
    32393318    y = fid.variables['y'][:]
    32403319    stage = fid.variables['stage'][:]
    3241     #print "stage",stage
    3242     #print "stage.shap",stage.shape
    3243     #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
    3244     #print "min(stage)",min(stage)
    32453320
    32463321    fid.close()
    32473322
    3248     return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
    3249 
    3250 
     3323    return [min(x), max(x), min(y), max(y), min(stage.flat), max(stage.flat)]
     3324
     3325
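
The return value is the flat six-element list spelled out in the new docstring; a one-line example (file name hypothetical):

    from anuga.shallow_water.data_manager import extent_sww

    xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('test.sww')
    print 'x in [%f, %f], y in [%f, %f]' % (xmin, xmax, ymin, ymax)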
     3326##
 3327# @brief Create a Domain instance from an SWW file.
     3328# @param filename
     3329# @param boundary
     3330# @param t
     3331# @param fail_if_NaN
     3332# @param NaN_filler
     3333# @param verbose
     3334# @param very_verbose
 3335# @return A Domain instance.
    32513336def sww2domain(filename, boundary=None, t=None,
    3252                fail_if_NaN=True ,NaN_filler=0,
    3253                verbose = False, very_verbose = False):
     3337               fail_if_NaN=True, NaN_filler=0,
     3338               verbose=False, very_verbose=False):
    32543339    """
    32553340    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
     
    32603345    give a different final boundary, or crash.
    32613346    """
    3262     NaN=9.969209968386869e+036
    3263     #initialise NaN.
    32643347
    32653348    from Scientific.IO.NetCDF import NetCDFFile
     
    32673350    from Numeric import asarray, transpose, resize
    32683351
     3352    # initialise NaN.
     3353    NaN = 9.969209968386869e+036
     3354
    32693355    if verbose: print 'Reading from ', filename
    3270     fid = NetCDFFile(filename, 'r')    #Open existing file for read
    3271     time = fid.variables['time']       #Timesteps
     3356
     3357    fid = NetCDFFile(filename, 'r')    # Open existing file for read
     3358    time = fid.variables['time']       # Timesteps
    32723359    if t is None:
    32733360        t = time[-1]
     
    32753362
    32763363    # Get the variables as Numeric arrays
    3277     x = fid.variables['x'][:]             #x-coordinates of vertices
    3278     y = fid.variables['y'][:]             #y-coordinates of vertices
    3279     elevation = fid.variables['elevation']     #Elevation
    3280     stage = fid.variables['stage']     #Water level
    3281     xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    3282     ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
     3364    x = fid.variables['x'][:]                   # x-coordinates of vertices
     3365    y = fid.variables['y'][:]                   # y-coordinates of vertices
     3366    elevation = fid.variables['elevation']      # Elevation
     3367    stage = fid.variables['stage']              # Water level
     3368    xmomentum = fid.variables['xmomentum']      # Momentum in the x-direction
     3369    ymomentum = fid.variables['ymomentum']      # Momentum in the y-direction
    32833370
    32843371    starttime = fid.starttime[0]
    3285     volumes = fid.variables['volumes'][:] #Connectivity
    3286     coordinates = transpose(asarray([x.tolist(),y.tolist()]))
    3287     #FIXME (Ole): Something like this might be better: concatenate( (x, y), axis=1 )
    3288     # or concatenate( (x[:,NewAxis],x[:,NewAxis]), axis=1 )
     3372    volumes = fid.variables['volumes'][:]       # Connectivity
     3373    coordinates = transpose(asarray([x.tolist(), y.tolist()]))
     3374    # FIXME (Ole): Something like this might be better:
     3375    #                 concatenate((x, y), axis=1)
     3376    # or              concatenate((x[:,NewAxis], x[:,NewAxis]), axis=1)
    32893377
    32903378    conserved_quantities = []
     
    32933381
    32943382    # get geo_reference
    3295     #sww files don't have to have a geo_ref
    3296     try:
     3383    try:                             # sww files don't have to have a geo_ref
    32973384        geo_reference = Geo_reference(NetCDFObject=fid)
    3298     except: #AttributeError, e:
     3385    except: # AttributeError, e:
    32993386        geo_reference = None
    33003387
    33013388    if verbose: print '    getting quantities'
     3389
    33023390    for quantity in fid.variables.keys():
    33033391        dimensions = fid.variables[quantity].dimensions
    33043392        if 'number_of_timesteps' in dimensions:
    33053393            conserved_quantities.append(quantity)
    3306             interpolated_quantities[quantity]=\
    3307                   interpolated_quantity(fid.variables[quantity][:],time_interp)
    3308         else: other_quantities.append(quantity)
     3394            interpolated_quantities[quantity] = \
     3395                  interpolated_quantity(fid.variables[quantity][:], time_interp)
     3396        else:
     3397            other_quantities.append(quantity)
    33093398
    33103399    other_quantities.remove('x')
     
    33193408    except:
    33203409        pass
    3321        
    33223410
    33233411    conserved_quantities.remove('time')
    33243412
    33253413    if verbose: print '    building domain'
     3414
    33263415    #    From domain.Domain:
    33273416    #    domain = Domain(coordinates, volumes,\
     
    33303419    #                    xllcorner=xllcorner, yllcorner=yllcorner)
    33313420
    3332     #   From shallow_water.Domain:
    3333     coordinates=coordinates.tolist()
    3334     volumes=volumes.tolist()
    3335     #FIXME:should this be in mesh?(peter row)
    3336     if fid.smoothing == 'Yes': unique = False
    3337     else: unique = True
     3421    # From shallow_water.Domain:
     3422    coordinates = coordinates.tolist()
     3423    volumes = volumes.tolist()
     3424    # FIXME:should this be in mesh? (peter row)
     3425    if fid.smoothing == 'Yes':
     3426        unique = False
     3427    else:
     3428        unique = True
    33383429    if unique:
    3339         coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
    3340 
      3430        coordinates, volumes, boundary = weed(coordinates, volumes, boundary)
    33413431
    33423432    try:
     
    33443434    except AssertionError, e:
    33453435        fid.close()
    3346         msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
     3436        msg = 'Domain could not be created: %s. ' \
     3437              'Perhaps use "fail_if_NaN=False and NaN_filler = ..."' % e
    33473438        raise DataDomainError, msg
    33483439
     
    33523443    domain.geo_reference = geo_reference
    33533444
    3354     domain.starttime=float(starttime)+float(t)
    3355     domain.time=0.0
     3445    domain.starttime = float(starttime) + float(t)
     3446    domain.time = 0.0
    33563447
    33573448    for quantity in other_quantities:
     
    33593450            NaN = fid.variables[quantity].missing_value
    33603451        except:
    3361             pass #quantity has no missing_value number
     3452            pass                       # quantity has no missing_value number
    33623453        X = fid.variables[quantity][:]
    33633454        if very_verbose:
    3364             print '       ',quantity
    3365             print '        NaN =',NaN
     3455            print '       ', quantity
     3456            print '        NaN =', NaN
    33663457            print '        max(X)'
    3367             print '       ',max(X)
     3458            print '       ', max(X)
    33683459            print '        max(X)==NaN'
    3369             print '       ',max(X)==NaN
     3460            print '       ', max(X)==NaN
    33703461            print ''
    3371         if (max(X)==NaN) or (min(X)==NaN):
     3462        if max(X) == NaN or min(X) == NaN:
    33723463            if fail_if_NaN:
    3373                 msg = 'quantity "%s" contains no_data entry'%quantity
     3464                msg = 'quantity "%s" contains no_data entry' % quantity
    33743465                raise DataMissingValuesError, msg
    33753466            else:
    3376                 data = (X<>NaN)
    3377                 X = (X*data)+(data==0)*NaN_filler
     3467                data = (X != NaN)
     3468                X = (X*data) + (data==0)*NaN_filler
    33783469        if unique:
    3379             X = resize(X,(len(X)/3,3))
    3380         domain.set_quantity(quantity,X)
     3470            X = resize(X, (len(X)/3, 3))
     3471        domain.set_quantity(quantity, X)
    33813472    #
    33823473    for quantity in conserved_quantities:
     
    33843475            NaN = fid.variables[quantity].missing_value
    33853476        except:
    3386             pass #quantity has no missing_value number
     3477            pass                       # quantity has no missing_value number
    33873478        X = interpolated_quantities[quantity]
    33883479        if very_verbose:
    33893480            print '       ',quantity
    3390             print '        NaN =',NaN
     3481            print '        NaN =', NaN
    33913482            print '        max(X)'
    3392             print '       ',max(X)
     3483            print '       ', max(X)
    33933484            print '        max(X)==NaN'
    3394             print '       ',max(X)==NaN
     3485            print '       ', max(X)==NaN
    33953486            print ''
    3396         if (max(X)==NaN) or (min(X)==NaN):
     3487        if max(X) == NaN or min(X) == NaN:
    33973488            if fail_if_NaN:
    3398                 msg = 'quantity "%s" contains no_data entry'%quantity
     3489                msg = 'quantity "%s" contains no_data entry' % quantity
    33993490                raise DataMissingValuesError, msg
    34003491            else:
    3401                 data = (X<>NaN)
    3402                 X = (X*data)+(data==0)*NaN_filler
     3492                data = (X != NaN)
     3493                X = (X*data) + (data==0)*NaN_filler
    34033494        if unique:
    3404             X = resize(X,(X.shape[0]/3,3))
    3405         domain.set_quantity(quantity,X)
     3495            X = resize(X, (X.shape[0]/3, 3))
     3496        domain.set_quantity(quantity, X)
    34063497
    34073498    fid.close()
     3499
    34083500    return domain
    34093501
    34103502
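
Following the Usage line in the docstring, a hedged sketch of rebuilding a domain from a hypothetical sww file, padding missing values rather than failing:

    from anuga.shallow_water.data_manager import sww2domain

    domain = sww2domain('test.sww',               # t defaults to last timestep
                        fail_if_NaN=False, NaN_filler=0,
                        verbose=True)
    print domain.starttime                        # starttime + t, as set above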
    3411 def interpolated_quantity(saved_quantity,time_interp):
    3412 
    3413     #given an index and ratio, interpolate quantity with respect to time.
    3414     index,ratio = time_interp
     3503##
     3504# @brief Interpolate a quantity wrt time.
     3505# @param saved_quantity The quantity to interpolate.
     3506# @param time_interp (index, ratio)
     3507# @return A vector of interpolated values.
     3508def interpolated_quantity(saved_quantity, time_interp):
     3509    '''Given an index and ratio, interpolate quantity with respect to time.'''
     3510
     3511    index, ratio = time_interp
     3512
    34153513    Q = saved_quantity
     3514
    34163515    if ratio > 0:
    3417         q = (1-ratio)*Q[index]+ ratio*Q[index+1]
     3516        q = (1-ratio)*Q[index] + ratio*Q[index+1]
    34183517    else:
    34193518        q = Q[index]
     3519
    34203520    #Return vector of interpolated values
    34213521    return q
    34223522
    34233523
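
A worked example of the linear blend above, using only what the function body shows:

    # With ratio 0.25 between timesteps 1 and 2, the result is
    # 0.75*Q[1] + 0.25*Q[2].
    from Numeric import array
    from anuga.shallow_water.data_manager import interpolated_quantity

    Q = array([[0.0, 0.0], [1.0, 2.0], [3.0, 6.0]])  # 3 timesteps, 2 points
    print interpolated_quantity(Q, (1, 0.25))        # [ 1.5  3. ]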
    3424 def get_time_interp(time,t=None):
     3524##
 3525# @brief Find the ratio and index for time interpolation.
 3526# @param time
 3527# @param t
 3528# @return An (index, ratio) tuple.
     3529def get_time_interp(time, t=None):
    34253530    #Finds the ratio and index for time interpolation.
    34263531    #It is borrowed from previous abstract_2d_finite_volumes code.
     
    34333538        tau = t
    34343539        index=0
    3435         msg = 'Time interval derived from file %s [%s:%s]'\
    3436             %('FIXMEfilename', T[0], T[-1])
    3437         msg += ' does not match model time: %s' %tau
     3540        msg = 'Time interval derived from file %s [%s:%s]' \
     3541              % ('FIXMEfilename', T[0], T[-1])
     3542        msg += ' does not match model time: %s' % tau
    34383543        if tau < time[0]: raise DataTimeError, msg
    34393544        if tau > time[-1]: raise DataTimeError, msg
     
    34473552            #t is now between index and index+1
    34483553            ratio = (tau - time[index])/(time[index+1] - time[index])
    3449     return (index,ratio)
    3450 
    3451 
    3452 def weed(coordinates,volumes,boundary = None):
    3453     if type(coordinates)==ArrayType:
     3554
     3555    return (index, ratio)
     3556
     3557
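
A small worked example of the (index, ratio) pair; given the interpolation formula above, t=2.5 between time[2]=2.0 and time[3]=3.0 should yield (2, 0.5):

    from Numeric import arange
    from anuga.shallow_water.data_manager import get_time_interp

    time = arange(0.0, 10.0)            # 0.0, 1.0, ..., 9.0
    print get_time_interp(time, t=2.5)  # expected: (2, 0.5)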
     3558##
 3559# @brief Merge duplicate points and remap volumes and boundary.
     3560# @param coordinates
     3561# @param volumes
     3562# @param boundary
     3563def weed(coordinates, volumes, boundary=None):
     3564    if type(coordinates) == ArrayType:
    34543565        coordinates = coordinates.tolist()
    3455     if type(volumes)==ArrayType:
     3566    if type(volumes) == ArrayType:
    34563567        volumes = volumes.tolist()
    34573568
     
    34633574        if point_dict.has_key(point):
    34643575            unique = True
    3465             same_point[i]=point
     3576            same_point[i] = point
    34663577            #to change all point i references to point j
    34673578        else:
    3468             point_dict[point]=i
    3469             same_point[i]=point
     3579            point_dict[point] = i
     3580            same_point[i] = point
    34703581
    34713582    coordinates = []
     
    34743585        point = tuple(point)
    34753586        coordinates.append(list(point))
    3476         point_dict[point]=i
    3477         i+=1
    3478 
     3587        point_dict[point] = i
     3588        i += 1
    34793589
    34803590    for volume in volumes:
    34813591        for i in range(len(volume)):
    34823592            index = volume[i]
    3483             if index>-1:
    3484                 volume[i]=point_dict[same_point[index]]
     3593            if index > -1:
     3594                volume[i] = point_dict[same_point[index]]
    34853595
    34863596    new_boundary = {}
     
    34933603            #('exterior, pond') or replaced ('pond')(peter row)
    34943604
    3495             if new_boundary.has_key((point0,point1)):
    3496                 new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
    3497                                               #+','+label
    3498 
    3499             elif new_boundary.has_key((point1,point0)):
    3500                 new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
    3501                                               #+','+label
    3502             else: new_boundary[(point0,point1)]=label
     3605            if new_boundary.has_key((point0, point1)):
     3606                new_boundary[(point0,point1)] = new_boundary[(point0, point1)]
     3607
     3608            elif new_boundary.has_key((point1, point0)):
     3609                new_boundary[(point1,point0)] = new_boundary[(point1, point0)]
     3610            else: new_boundary[(point0, point1)] = label
    35033611
    35043612        boundary = new_boundary
    35053613
    3506     return coordinates,volumes,boundary
    3507 
    3508 
     3614    return coordinates, volumes, boundary
     3615
     3616
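
A hedged sketch of weed() on two triangles sharing an edge, so two coordinate pairs are duplicated; the ordering of the merged points is an implementation detail, but volume indices are remapped consistently. boundary is optional and left as None here.

    from anuga.shallow_water.data_manager import weed

    coords = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0],
              [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]]  # points 3, 5 repeat 1, 2
    vols = [[0, 1, 2], [3, 4, 5]]
    coords, vols, bnd = weed(coords, vols)
    print len(coords), vols                        # 4 unique points remain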
     3617##
     3618# @brief Read DEM file, decimate data, write new DEM file.
     3619# @param basename_in Stem of the input filename.
     3620# @param stencil
     3621# @param cellsize_new New cell size to resample on.
     3622# @param basename_out Stem of the output filename.
     3623# @param verbose True if this function is to be verbose.
    35093624def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
    35103625                 verbose=False):
     
    35333648    #Open existing netcdf file to read
    35343649    infile = NetCDFFile(inname, 'r')
    3535     if verbose: print 'Reading DEM from %s' %inname
     3650
     3651    if verbose: print 'Reading DEM from %s' % inname
    35363652
    35373653    #Read metadata
     
    35573673        outname = basename_out + '.dem'
    35583674
    3559     if verbose: print 'Write decimated NetCDF file to %s' %outname
     3675    if verbose: print 'Write decimated NetCDF file to %s' % outname
    35603676
    35613677    #Determine some dimensions for decimated grid
     
    35723688    #Create new file
    35733689    outfile.institution = 'Geoscience Australia'
    3574     outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
    3575                            'of spatial point data'
     3690    outfile.description = 'NetCDF DEM format for compact and portable ' \
     3691                          'storage of spatial point data'
     3692
    35763693    #Georeferencing
    35773694    outfile.zone = zone
     
    36053722    for i in range(nrows_new):
    36063723        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
     3724
    36073725        lower_index = global_index
    3608         telev =  zeros(ncols_new, Float)
     3726        telev = zeros(ncols_new, Float)
    36093727        local_index = 0
    36103728        trow = i * cellsize_ratio
     
    36123730        for j in range(ncols_new):
    36133731            tcol = j * cellsize_ratio
    3614             tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
     3732            tmp = dem_elevation_r[trow:trow+nrows_stencil,
     3733                                  tcol:tcol+ncols_stencil]
    36153734
    36163735            #if dem contains 1 or more NODATA_values set value in
     
    36293748        elevation[lower_index:upper_index] = telev
    36303749
    3631     assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
     3750    assert global_index == nrows_new*ncols_new, \
     3751           'index not equal to number of points'
    36323752
    36333753    infile.close()
     
    36353755
    36363756
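
A hedged sketch of decimate_dem(); the stencil's shape sets the resampling window (cf. the nrows_stencil/ncols_stencil slicing above) and its values are assumed here to be averaging weights. File stems and cell size are hypothetical.

    from Numeric import ones, Float
    from anuga.shallow_water.data_manager import decimate_dem

    stencil = ones((3, 3), Float) / 9.0           # uniform 3x3 block average
    decimate_dem('fine', stencil, cellsize_new=30.0,
                 basename_out='coarse', verbose=True)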
    3637 
    3638 
    3639 def tsh2sww(filename, verbose=False):
     3757##
 3758# @brief Check that a tsh/msh file 'looks' good by converting it to SWW.
     3759# @param filename
     3760# @param verbose
     3761def tsh2sww(filename, verbose=False):
    36403762    """
    36413763    to check if a tsh/msh file 'looks' good.
    36423764    """
    36433765
    3644 
    36453766    if verbose == True:print 'Creating domain from', filename
     3767
    36463768    domain = pmesh_to_domain_instance(filename, Domain)
     3769
    36473770    if verbose == True:print "Number of triangles = ", len(domain)
    36483771
     
    36513774    file_path, filename = path.split(filename)
    36523775    filename, ext = path.splitext(filename)
    3653     domain.set_name(filename)   
     3776    domain.set_name(filename)
    36543777    domain.reduction = mean
     3778
    36553779    if verbose == True:print "file_path",file_path
    3656     if file_path == "":file_path = "."
     3780
     3781    if file_path == "":
     3782        file_path = "."
    36573783    domain.set_datadir(file_path)
    36583784
     
    36603786        print "Output written to " + domain.get_datadir() + sep + \
    36613787              domain.get_name() + "." + domain.format
     3788
    36623789    sww = get_dataobject(domain)
    36633790    sww.store_connectivity()
     
    36653792
    36663793
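
As the docstring says, tsh2sww() is a quick way to check that a mesh file 'looks' good; a one-line hedged example with a hypothetical mesh:

    from anuga.shallow_water.data_manager import tsh2sww

    tsh2sww('channel.tsh', verbose=True)  # writes channel.sww next to the mesh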
     3794##
     3795# @brief Convert CSIRO ESRI file to an SWW boundary file.
     3796# @param bath_dir
     3797# @param elevation_dir
     3798# @param ucur_dir
     3799# @param vcur_dir
     3800# @param sww_file
     3801# @param minlat
     3802# @param maxlat
     3803# @param minlon
     3804# @param maxlon
     3805# @param zscale
     3806# @param mean_stage
     3807# @param fail_on_NaN
     3808# @param elevation_NaN_filler
     3809# @param bath_prefix
     3810# @param elevation_prefix
     3811# @param verbose
     3812# @note Also convert latitude and longitude to UTM. All coordinates are
     3813#       assumed to be given in the GDA94 datum.
    36673814def asc_csiro2sww(bath_dir,
    36683815                  elevation_dir,
     
    36703817                  vcur_dir,
    36713818                  sww_file,
    3672                   minlat = None, maxlat = None,
    3673                   minlon = None, maxlon = None,
     3819                  minlat=None, maxlat=None,
     3820                  minlon=None, maxlon=None,
    36743821                  zscale=1,
    3675                   mean_stage = 0,
    3676                   fail_on_NaN = True,
    3677                   elevation_NaN_filler = 0,
     3822                  mean_stage=0,
     3823                  fail_on_NaN=True,
     3824                  elevation_NaN_filler=0,
    36783825                  bath_prefix='ba',
    36793826                  elevation_prefix='el',
     
    37003847    The time period is less than 24hrs and uniform.
    37013848    """
     3849
    37023850    from Scientific.IO.NetCDF import NetCDFFile
    37033851
     
    37083856    # go in to the bath dir and load the only file,
    37093857    bath_files = os.listdir(bath_dir)
    3710 
    37113858    bath_file = bath_files[0]
    37123859    bath_dir_file =  bath_dir + os.sep + bath_file
    3713     bath_metadata,bath_grid =  _read_asc(bath_dir_file)
      3860    bath_metadata, bath_grid = _read_asc(bath_dir_file)
    37143861
    37153862    #Use the date.time of the bath file as a basis for
     
    37253872    vcur_files = os.listdir(vcur_dir)
    37263873    elevation_files.sort()
     3874
    37273875    # the first elevation file should be the
    37283876    # file with the same base name as the bath data
     
    37313879    number_of_latitudes = bath_grid.shape[0]
    37323880    number_of_longitudes = bath_grid.shape[1]
    3733     number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
    3734 
    3735     longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
     3881    number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2
     3882
     3883    longitudes = [bath_metadata['xllcorner'] + x*bath_metadata['cellsize'] \
    37363884                  for x in range(number_of_longitudes)]
    3737     latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
     3885    latitudes = [bath_metadata['yllcorner'] + y*bath_metadata['cellsize'] \
    37383886                 for y in range(number_of_latitudes)]
    37393887
    3740      # reverse order of lat, so the fist lat represents the first grid row
     3888     # reverse order of lat, so the first lat represents the first grid row
    37413889    latitudes.reverse()
    37423890
    37433891    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
    3744                                                  minlat=minlat, maxlat=maxlat,
    3745                                                  minlon=minlon, maxlon=maxlon)
    3746 
     3892                                                  minlat=minlat, maxlat=maxlat,
     3893                                                  minlon=minlon, maxlon=maxlon)
    37473894
    37483895    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
     
    37523899    number_of_longitudes = len(longitudes)
    37533900    number_of_times = len(os.listdir(elevation_dir))
    3754     number_of_points = number_of_latitudes*number_of_longitudes
    3755     number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
     3901    number_of_points = number_of_latitudes * number_of_longitudes
     3902    number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2
    37563903
    37573904    #Work out the times
    37583905    if len(elevation_files) > 1:
    37593906        # Assume: The time period is less than 24hrs.
    3760         time_period = (int(elevation_files[1][-3:]) - \
    3761                       int(elevation_files[0][-3:]))*60*60
     3907        time_period = (int(elevation_files[1][-3:]) \
     3908                       - int(elevation_files[0][-3:])) * 60*60
    37623909        times = [x*time_period for x in range(len(elevation_files))]
    37633910    else:
    37643911        times = [0.0]
    3765 
    37663912
    37673913    if verbose:
     
    37693915        print 'Statistics:'
    37703916        print '  Extent (lat/lon):'
    3771         print '    lat in [%f, %f], len(lat) == %d'\
    3772               %(min(latitudes), max(latitudes),
    3773                 len(latitudes))
    3774         print '    lon in [%f, %f], len(lon) == %d'\
    3775               %(min(longitudes), max(longitudes),
    3776                 len(longitudes))
    3777         print '    t in [%f, %f], len(t) == %d'\
    3778               %(min(times), max(times), len(times))
     3917        print '    lat in [%f, %f], len(lat) == %d' \
     3918              % (min(latitudes), max(latitudes), len(latitudes))
     3919        print '    lon in [%f, %f], len(lon) == %d' \
     3920              % (min(longitudes), max(longitudes), len(longitudes))
     3921        print '    t in [%f, %f], len(t) == %d' \
     3922              % (min(times), max(times), len(times))
    37793923
    37803924    ######### WRITE THE SWW FILE #############
     3925
    37813926    # NetCDF file definition
    37823927    outfile = NetCDFFile(sww_file, 'w')
     
    37863931    outfile.description = 'Converted from XXX'
    37873932
    3788 
    37893933    #For sww compatibility
    37903934    outfile.smoothing = 'Yes'
     
    37943938    outfile.starttime = starttime = times[0]
    37953939
    3796 
    37973940    # dimension definitions
    37983941    outfile.createDimension('number_of_volumes', number_of_volumes)
    3799 
    38003942    outfile.createDimension('number_of_vertices', 3)
    38013943    outfile.createDimension('number_of_points', number_of_points)
     
    38143956                                            'number_of_vertices'))
    38153957
    3816     outfile.createVariable('time', precision,
    3817                            ('number_of_timesteps',))
    3818 
    3819     outfile.createVariable('stage', precision,
    3820                            ('number_of_timesteps',
    3821                             'number_of_points'))
    3822 
    3823     outfile.createVariable('xmomentum', precision,
    3824                            ('number_of_timesteps',
    3825                             'number_of_points'))
    3826 
    3827     outfile.createVariable('ymomentum', precision,
    3828                            ('number_of_timesteps',
    3829                             'number_of_points'))
     3958    outfile.createVariable('time', precision, ('number_of_timesteps',))
     3959
     3960    outfile.createVariable('stage', precision, ('number_of_timesteps',
     3961                                                'number_of_points'))
     3962
     3963    outfile.createVariable('xmomentum', precision, ('number_of_timesteps',
     3964                                                    'number_of_points'))
     3965
     3966    outfile.createVariable('ymomentum', precision, ('number_of_timesteps',
     3967                                                    'number_of_points'))
    38303968
    38313969    #Store
    38323970    from anuga.coordinate_transforms.redfearn import redfearn
     3971
    38333972    x = zeros(number_of_points, Float)  #Easting
    38343973    y = zeros(number_of_points, Float)  #Northing
    38353974
    38363975    if verbose: print 'Making triangular grid'
     3976
    38373977    #Get zone of 1st point.
    3838     refzone, _, _ = redfearn(latitudes[0],longitudes[0])
     3978    refzone, _, _ = redfearn(latitudes[0], longitudes[0])
    38393979
    38403980    vertices = {}
     
    38423982    for k, lat in enumerate(latitudes):
    38433983        for l, lon in enumerate(longitudes):
    3844 
    38453984            vertices[l,k] = i
    38463985
    3847             zone, easting, northing = redfearn(lat,lon)
    3848 
    3849             msg = 'Zone boundary crossed at longitude =', lon
     3986            zone, easting, northing = redfearn(lat, lon)
     3987
     3988            #msg = 'Zone boundary crossed at longitude =', lon
    38503989            #assert zone == refzone, msg
    38513990            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
     
    38533992            y[i] = northing
    38543993            i += 1
    3855 
    38563994
    38573995    #Construct 2 triangles per 'rectangular' element
     
    38714009    volumes = array(volumes)
    38724010
    3873     geo_ref = Geo_reference(refzone,min(x),min(y))
     4011    geo_ref = Geo_reference(refzone, min(x), min(y))
    38744012    geo_ref.write_NetCDF(outfile)
    38754013
    38764014    # This will put the geo ref in the middle
    3877     #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
    3878 
     4015    #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2., (max(x)+min(y))/2.)
    38794016
    38804017    if verbose:
     
    38824019        print 'More Statistics:'
    38834020        print '  Extent (/lon):'
    3884         print '    x in [%f, %f], len(lat) == %d'\
    3885               %(min(x), max(x),
    3886                 len(x))
    3887         print '    y in [%f, %f], len(lon) == %d'\
    3888               %(min(y), max(y),
    3889                 len(y))
    3890         print 'geo_ref: ',geo_ref
     4021        print '    x in [%f, %f], len(lat) == %d' \
     4022              % (min(x), max(x), len(x))
     4023        print '    y in [%f, %f], len(lon) == %d' \
     4024              % (min(y), max(y), len(y))
     4025        print 'geo_ref: ', geo_ref
    38914026
    38924027    z = resize(bath_grid,outfile.variables['z'][:].shape)
    38934028    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    38944029    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    3895     outfile.variables['z'][:] = z # FIXME (Ole): Remove once viewer has been recompiled and changed to use elevation instead of z
    3896     outfile.variables['elevation'][:] = z 
     4030# FIXME (Ole): Remove once viewer has been recompiled and changed
     4031#              to use elevation instead of z
     4032    outfile.variables['z'][:] = z
     4033    outfile.variables['elevation'][:] = z
    38974034    outfile.variables['volumes'][:] = volumes.astype(Int32) # On Opteron 64
    38984035
     
    39044041
    39054042    if verbose: print 'Converting quantities'
     4043
    39064044    n = number_of_times
    39074045    for j in range(number_of_times):
    39084046        # load in files
    39094047        elevation_meta, elevation_grid = \
    3910            _read_asc(elevation_dir + os.sep + elevation_files[j])
    3911 
    3912         _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
    3913         _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
     4048            _read_asc(elevation_dir + os.sep + elevation_files[j])
     4049
     4050        _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
     4051        _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])
    39144052
    39154053        #cut matrix to desired size
     
    39174055        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
    39184056        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
    3919        
     4057
    39204058        # handle missing values
    39214059        missing = (elevation_grid == elevation_meta['NODATA_value'])
    39224060        if sometrue (missing):
    39234061            if fail_on_NaN:
    3924                 msg = 'File %s contains missing values'\
    3925                       %(elevation_files[j])
     4062                msg = 'File %s contains missing values' \
     4063                      % (elevation_files[j])
    39264064                raise DataMissingValuesError, msg
    39274065            else:
    3928                 elevation_grid = elevation_grid*(missing==0) + \
    3929                                  missing*elevation_NaN_filler
    3930 
    3931 
    3932         if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
     4066                elevation_grid = elevation_grid*(missing==0) \
     4067                                 + missing*elevation_NaN_filler
     4068
     4069        if verbose and j % ((n+10)/10) == 0: print '  Doing %d of %d' % (j, n)
     4070
    39334071        i = 0
    39344072        for k in range(number_of_latitudes):      #Y direction
     
    39404078                ymomentum[j,i] = v_momentum_grid[k,l]*h
    39414079                i += 1
     4080
    39424081    outfile.close()
    39434082
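
A hedged call of asc_csiro2sww() with hypothetical directories and extents, using only keywords from the signature above:

    from anuga.shallow_water.data_manager import asc_csiro2sww

    asc_csiro2sww('bath_dir', 'elevation_dir', 'ucur_dir', 'vcur_dir',
                  'event.sww',
                  minlat=-34.5, maxlat=-34.0, minlon=150.0, maxlon=150.5,
                  zscale=1, mean_stage=0,
                  fail_on_NaN=False, elevation_NaN_filler=0,
                  verbose=True)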
     4083
     4084##
     4085# @brief Return max&min indexes (for slicing) of area specified.
     4086# @param latitudes_ref ??
     4087# @param longitudes_ref ??
     4088# @param minlat Minimum latitude of specified area.
     4089# @param maxlat Maximum latitude of specified area.
     4090# @param minlon Minimum longitude of specified area.
     4091# @param maxlon Maximum longitude of specified area.
     4092# @return Tuple (lat_min_index, lat_max_index, lon_min_index, lon_max_index)
    39444093def _get_min_max_indexes(latitudes_ref,longitudes_ref,
    3945                         minlat=None, maxlat=None,
    3946                         minlon=None, maxlon=None):
    3947     """
    3948     return max, min indexes (for slicing) of the lat and long arrays to cover the area
    3949     specified with min/max lat/long
     4094                         minlat=None, maxlat=None,
     4095                         minlon=None, maxlon=None):
     4096    """
     4097    Return max, min indexes (for slicing) of the lat and long arrays to cover
     4098    the area specified with min/max lat/long.
    39504099
    39514100    Think of the latitudes and longitudes describing a 2d surface.
     
    39584107    assert latitudes are sorted, ascending or decending
    39594108    """
     4109
    39604110    latitudes = latitudes_ref[:]
    39614111    longitudes = longitudes_ref[:]
     
    39694119    #print len(latitudes),len(longitudes)
    39704120    #print latitudes[len(latitudes)-1],longitudes[len(longitudes)-1]
    3971    
     4121
    39724122    lat_ascending = True
    39734123    if not allclose(sort(latitudes), latitudes):
    39744124        lat_ascending = False
    3975         # reverse order of lat, so it's in ascending order         
     4125        # reverse order of lat, so it's in ascending order
    39764126        latitudes = latitudes[::-1]
    39774127        assert allclose(sort(latitudes), latitudes)
    3978     #print "latitudes  in funct", latitudes
    3979    
     4128
    39804129    largest_lat_index = len(latitudes)-1
     4130
    39814131    #Cut out a smaller extent.
    39824132    if minlat == None:
     
    39874137            lat_min_index = 0
    39884138
    3989 
    39904139    if maxlat == None:
    39914140        lat_max_index = largest_lat_index #len(latitudes)
     
    40094158    # Reversing the indexes, if the lat array is decending
    40104159    if lat_ascending is False:
    4011         lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
     4160        lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
    40124161                                       largest_lat_index - lat_min_index
    40134162    lat_max_index = lat_max_index + 1 # taking into account how slicing works
     
    40174166
    40184167
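
The returned indexes are meant for slicing, mirroring how the converters above use them; a hedged sketch with small hypothetical arrays:

    from anuga.shallow_water.data_manager import _get_min_max_indexes

    lats = [-34.0, -34.5, -35.0, -35.5]           # descending, as handled above
    lons = [150.0, 150.5, 151.0]
    kmin, kmax, lmin, lmax = _get_min_max_indexes(lats, lons,
                                                  minlat=-35.2, maxlat=-34.2,
                                                  minlon=150.2, maxlon=151.0)
    lats = lats[kmin:kmax]              # slices cover the requested area,
    lons = lons[lmin:lmax]              # extending past each bound if needed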
     4168##
     4169# @brief Read an ASC file.
 4170# @param filename Path to the file to read.
     4171# @param verbose True if this function is to be verbose.
    40194172def _read_asc(filename, verbose=False):
    40204173    """Read esri file from the following ASCII format (.asc)
     
    40294182    NODATA_value  -9999
    40304183    138.3698 137.4194 136.5062 135.5558 ..........
    4031 
    40324184    """
    40334185
    40344186    datafile = open(filename)
    40354187
    4036     if verbose: print 'Reading DEM from %s' %(filename)
     4188    if verbose: print 'Reading DEM from %s' % filename
     4189
    40374190    lines = datafile.readlines()
    40384191    datafile.close()
     
    40654218
    40664219
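
_read_asc() returns a (metadata, grid) pair, as the callers above show; the metadata keys used elsewhere in this module include 'xllcorner', 'yllcorner', 'cellsize' and 'NODATA_value'. A short example (file name hypothetical):

    from anuga.shallow_water.data_manager import _read_asc

    metadata, grid = _read_asc('bathymetry.asc', verbose=True)
    print metadata['cellsize'], grid.shape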
    4067 
    40684220    ####  URS 2 SWW  ###
    40694221
     4222# Definitions of various NetCDF dimension names, etc.
    40704223lon_name = 'LON'
    40714224lat_name = 'LAT'
    40724225time_name = 'TIME'
    4073 precision = Float # So if we want to change the precision its done here       
 4226precision = Float # So if we want to change the precision, it's done here
     4227
     4228##
 4229# @brief Class for a NetCDF data file writer.
    40744230class Write_nc:
    4075     """
    4076     Write an nc file.
    4077    
     4231    """Write an nc file.
     4232
    40784233    Note, this should be checked to meet cdc netcdf conventions for gridded
    40794234    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
    4080    
    4081     """
     4235    """
     4236
     4237    ##
     4238    # @brief Instantiate a Write_nc instance.
     4239    # @param quantity_name
     4240    # @param file_name
     4241    # @param time_step_count The number of time steps.
     4242    # @param time_step The time_step size.
     4243    # @param lon
     4244    # @param lat
    40824245    def __init__(self,
    40834246                 quantity_name,
     
    40874250                 lon,
    40884251                 lat):
    4089         """
     4252        """Instantiate a Write_nc instance (NetCDF file writer).
     4253
    40904254        time_step_count is the number of time steps.
    40914255        time_step is the time step size
    4092        
    4093         pre-condition: quantity_name must be 'HA' 'UA'or 'VA'.
     4256
      4257        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
    40944258        """
     4259
    40954260        self.quantity_name = quantity_name
    40964261        quantity_units = {'HA':'CENTIMETERS',
    4097                               'UA':'CENTIMETERS/SECOND',
    4098                               'VA':'CENTIMETERS/SECOND'}       
    4099        
    4100         multiplier_dic = {'HA':100.0, # To convert from m to cm
    4101                               'UA':100.0,  #  m/s to cm/sec
    4102                               'VA':-100.0}  # MUX files have positve x in the
    4103         # Southern direction.  This corrects for it, when writing nc files.
    4104        
     4262                          'UA':'CENTIMETERS/SECOND',
     4263                          'VA':'CENTIMETERS/SECOND'}
     4264
     4265        multiplier_dic = {'HA':100.0,   # To convert from m to cm
     4266                          'UA':100.0,   #             and m/s to cm/sec
     4267                          'VA':-100.0}  # MUX files have positive x in the
     4268                                        # Southern direction.  This corrects
     4269                                        # for it, when writing nc files.
     4270
    41054271        self.quantity_multiplier =  multiplier_dic[self.quantity_name]
    4106        
     4272
    41074273        #self.file_name = file_name
    41084274        self.time_step_count = time_step_count
     
    41114277        # NetCDF file definition
    41124278        self.outfile = NetCDFFile(file_name, 'w')
    4113         outfile = self.outfile       
     4279        outfile = self.outfile
    41144280
    41154281        #Create new file
    41164282        nc_lon_lat_header(outfile, lon, lat)
    4117    
     4283
    41184284        # TIME
    41194285        outfile.createDimension(time_name, None)
     
    41234289        outfile.createVariable(self.quantity_name, precision,
    41244290                               (time_name, lat_name, lon_name))
    4125         outfile.variables[self.quantity_name].missing_value=-1.e+034
    4126         outfile.variables[self.quantity_name].units= \
     4291        outfile.variables[self.quantity_name].missing_value = -1.e+034
     4292        outfile.variables[self.quantity_name].units = \
    41274293                                 quantity_units[self.quantity_name]
    41284294        outfile.variables[lon_name][:]= ensure_numeric(lon)
     
    41314297        #Assume no one will be wanting to read this, while we are writing
    41324298        #outfile.close()
    4133        
     4299
     4300    ##
     4301    # @brief Write a time-step of quantity data.
     4302    # @param quantity_slice The data to be stored for this time-step.
    41344303    def store_timestep(self, quantity_slice):
    4135         """
    4136         Write a time slice of quantity info
     4304        """Write a time slice of quantity info
     4305
    41374306        quantity_slice is the data to be stored at this time step
    41384307        """
    4139        
    4140         outfile = self.outfile
    4141        
     4308
    41424309        # Get the variables
    4143         time = outfile.variables[time_name]
    4144         quantity = outfile.variables[self.quantity_name]
    4145            
     4310        time = self.outfile.variables[time_name]
     4311        quantity = self.outfile.variables[self.quantity_name]
     4312
      4313        # get index of slice to write
    41464314        i = len(time)
    41474315
    41484316        #Store time
    4149         time[i] = i*self.time_step #self.domain.time
    4150         quantity[i,:] = quantity_slice* self.quantity_multiplier
    4151        
     4317        time[i] = i * self.time_step    #self.domain.time
     4318        quantity[i,:] = quantity_slice * self.quantity_multiplier
     4319
     4320    ##
     4321    # @brief Close file underlying the class instance.
    41524322    def close(self):
    41534323        self.outfile.close()
    41544324
     4325
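
A hedged sketch of the writer class above: two timesteps of wave amplitude on a 2x2 grid. 'HA' values are supplied in metres and scaled to centimetres by quantity_multiplier; the file name is hypothetical.

    from Numeric import array
    from anuga.shallow_water.data_manager import Write_nc

    nc = Write_nc('HA', 'test_ha.nc',
                  time_step_count=2, time_step=60.0,
                  lon=[150.0, 150.5], lat=[-34.0, -34.5])
    nc.store_timestep(array([[0.1, 0.2], [0.3, 0.4]]))  # t = 0
    nc.store_timestep(array([[0.2, 0.3], [0.4, 0.5]]))  # t = 60
    nc.close()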
     4326##
     4327# @brief Convert URS file to SWW file.
     4328# @param basename_in Stem of the input filename.
     4329# @param basename_out Stem of the output filename.
     4330# @param verbose True if this function is to be verbose.
     4331# @param remove_nc_files
     4332# @param minlat Sets extent of area to be used.  If not supplied, full extent.
     4333# @param maxlat Sets extent of area to be used.  If not supplied, full extent.
     4334# @param minlon Sets extent of area to be used.  If not supplied, full extent.
     4335# @param maxlon Sets extent of area to be used.  If not supplied, full extent.
     4336# @param mint
     4337# @param maxt
     4338# @param mean_stage
     4339# @param origin A 3-tuple with geo referenced UTM coordinates
     4340# @param zscale
     4341# @param fail_on_NaN
     4342# @param NaN_filler
     4343# @param elevation
     4344# @note Also convert latitude and longitude to UTM. All coordinates are
     4345#       assumed to be given in the GDA94 datum.
    41554346def urs2sww(basename_in='o', basename_out=None, verbose=False,
    41564347            remove_nc_files=True,
    41574348            minlat=None, maxlat=None,
    4158             minlon= None, maxlon=None,
     4349            minlon=None, maxlon=None,
    41594350            mint=None, maxt=None,
    41604351            mean_stage=0,
    4161             origin = None,
     4352            origin=None,
    41624353            zscale=1,
    41634354            fail_on_NaN=True,
    41644355            NaN_filler=0,
    41654356            elevation=None):
    4166     """
     4357    """Convert a URS file to an SWW file.
    41674358    Convert URS C binary format for wave propagation to
    41684359    sww format native to abstract_2d_finite_volumes.
     
     41794370    min's and max's: If omitted - full extent is used.
    41804371    To include a value min may equal it, while max must exceed it.
    4181     Lat and lon are assumed to be in decimal degrees. 
     4372    Lat and lon are assumed to be in decimal degrees.
     41824373    NOTE: minlon is the most easterly boundary.
    4183    
     4374
    41844375    origin is a 3-tuple with geo referenced
    41854376    UTM coordinates (zone, easting, northing)
     
     41874378    since all of anuga should be able to handle an arbitrary origin.
    41884379
    4189 
     41904380    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    41914381    which means that latitude is the fastest
     
     41944384    In URS C binary the latitudes and longitudes are in ascending order.
    41954385    """
     4386
     41964387    if basename_out is None:
    41974388        basename_out = basename_in
     4389
    41984390    files_out = urs2nc(basename_in, basename_out)
     4391
    41994392    ferret2sww(basename_out,
    42004393               minlat=minlat,
     
    42114404               inverted_bathymetry=True,
    42124405               verbose=verbose)
    4213     #print "files_out",files_out
     4406   
    42144407    if remove_nc_files:
    42154408        for file_out in files_out:
    42164409            os.remove(file_out)
    4217    
    4218 def urs2nc(basename_in = 'o', basename_out = 'urs'):
    4219     """
    4220     Convert the 3 urs files to 4 nc files.
     4410
     4411
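For orientation, a minimal usage sketch of the conversion chain above (the stem 'o_tsunami' and the output name are hypothetical; urs2sww writes intermediate NetCDF files via urs2nc and then delegates to ferret2sww):

    from anuga.shallow_water.data_manager import urs2sww

    # Convert o_tsunami-z-mux, o_tsunami-e-mux and o_tsunami-n-mux into
    # tsunami.sww, removing the intermediate .nc files afterwards.
    urs2sww('o_tsunami',
            basename_out='tsunami',
            mean_stage=0.0,
            remove_nc_files=True,
            verbose=False)
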
     4412##
     4413# @brief Convert 3 URS files back to 4 NC files.
     4414# @param basename_in Stem of the input filenames.
      4415# @param basename_out Stem of the output filenames.
     4416# @note The name of the urs file names must be:
     4417#          [basename_in]-z-mux
     4418#          [basename_in]-e-mux
     4419#          [basename_in]-n-mux
     4420def urs2nc(basename_in='o', basename_out='urs'):
     4421    """Convert the 3 urs files to 4 nc files.
    42214422
    42224423    The name of the urs file names must be;
     
    42244425    [basename_in]-e-mux
    42254426    [basename_in]-n-mux
    4226    
    4227     """
    4228    
     4427    """
     4428
    42294429    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
    42304430                basename_in + EAST_VELOCITY_LABEL,
    42314431                basename_in + NORTH_VELOCITY_LABEL]
    4232     files_out = [basename_out+'_ha.nc',
    4233                  basename_out+'_ua.nc',
    4234                  basename_out+'_va.nc']
    4235     quantities = ['HA','UA','VA']
     4432    files_out = [basename_out + '_ha.nc',
     4433                 basename_out + '_ua.nc',
     4434                 basename_out + '_va.nc']
     4435    quantities = ['HA', 'UA', 'VA']
    42364436
    42374437    #if os.access(files_in[0]+'.mux', os.F_OK) == 0 :
    42384438    for i, file_name in enumerate(files_in):
    42394439        if os.access(file_name, os.F_OK) == 0:
    4240             if os.access(file_name+'.mux', os.F_OK) == 0 :
    4241                 msg = 'File %s does not exist or is not accessible' %file_name
      4440            if os.access(file_name + '.mux', os.F_OK) == 0:
     4441                msg = 'File %s does not exist or is not accessible' % file_name
    42424442                raise IOError, msg
    42434443            else:
    42444444               files_in[i] += '.mux'
    42454445               print "file_name", file_name
     4446
    42464447    hashed_elevation = None
    42474448    for file_in, file_out, quantity in map(None, files_in,
     
    42494450                                           quantities):
    42504451        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
    4251                                          file_out,
    4252                                          quantity)
    4253         #print "lonlatdep", lonlatdep
     4452                                                  file_out,
     4453                                                  quantity)
     42544454        if hashed_elevation is None:
    4255             elevation_file = basename_out+'_e.nc'
     4455            elevation_file = basename_out + '_e.nc'
    42564456            write_elevation_nc(elevation_file,
    4257                                 lon,
    4258                                 lat,
    4259                                 depth)
     4457                               lon,
     4458                               lat,
     4459                               depth)
    42604460            hashed_elevation = myhash(lonlatdep)
    42614461        else:
    42624462            msg = "The elevation information in the mux files is inconsistent"
    42634463            assert hashed_elevation == myhash(lonlatdep), msg
     4464
    42644465    files_out.append(elevation_file)
     4466
    42654467    return files_out
    4266    
     4468
     4469
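As a concrete illustration of the file naming above (the stem 'boxingday' is hypothetical):

    files = urs2nc(basename_in='boxingday', basename_out='bd')
    # Reads boxingday-z-mux, boxingday-e-mux and boxingday-n-mux and
    # should return ['bd_ha.nc', 'bd_ua.nc', 'bd_va.nc', 'bd_e.nc'];
    # the elevation file is appended last, as in the code above.
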
     4470##
     4471# @brief Convert a quantity URS file to a NetCDF file.
     4472# @param file_in Path to input URS file.
     4473# @param file_out Path to the output file.
     4474# @param quantity Name of the quantity to be written to the output file.
     4475# @return A tuple (lonlatdep, lon, lat, depth).
    42674476def _binary_c2nc(file_in, file_out, quantity):
    4268     """
    4269     Reads in a quantity urs file and writes a quantity nc file.
    4270     additionally, returns the depth and lat, long info,
     4477    """Reads in a quantity urs file and writes a quantity nc file.
     4478    Additionally, returns the depth and lat, long info,
    42714479    so it can be written to a file.
    42724480    """
    4273     columns = 3 # long, lat , depth
     4481
      4482    columns = 3                           # long, lat, depth
    42744483    mux_file = open(file_in, 'rb')
    4275    
     4484
    42764485    # Number of points/stations
    4277     (points_num,)= unpack('i',mux_file.read(4))
     4486    (points_num,) = unpack('i', mux_file.read(4))
    42784487
    42794488    # nt, int - Number of time steps
    4280     (time_step_count,)= unpack('i',mux_file.read(4))
     4489    (time_step_count,) = unpack('i', mux_file.read(4))
    42814490
    42824491    #dt, float - time step, seconds
    42834492    (time_step,) = unpack('f', mux_file.read(4))
    4284    
     4493
    42854494    msg = "Bad data in the mux file."
    42864495    if points_num < 0:
     
    42934502        mux_file.close()
    42944503        raise ANUGAError, msg
    4295    
     4504
    42964505    lonlatdep = p_array.array('f')
    42974506    lonlatdep.read(mux_file, columns * points_num)
    4298     lonlatdep = array(lonlatdep, typecode=Float)   
     4507    lonlatdep = array(lonlatdep, typecode=Float)
    42994508    lonlatdep = reshape(lonlatdep, (points_num, columns))
    4300    
     4509
    43014510    lon, lat, depth = lon_lat2grid(lonlatdep)
    43024511    lon_sorted = list(lon)
     
    43064515        msg = "Longitudes in mux file are not in ascending order"
    43074516        raise IOError, msg
     4517
    43084518    lat_sorted = list(lat)
    43094519    lat_sorted.sort()
    43104520
    4311     if not lat == lat_sorted:
    4312         msg = "Latitudes in mux file are not in ascending order"
    4313    
     4521# UNUSED?
     4522##    if not lat == lat_sorted:
     4523##        msg = "Latitudes in mux file are not in ascending order"
     4524
    43144525    nc_file = Write_nc(quantity,
    43154526                       file_out,
     
    43204531
    43214532    for i in range(time_step_count):
    4322         #Read in a time slice  from mux file 
     4533        #Read in a time slice from mux file
    43234534        hz_p_array = p_array.array('f')
    43244535        hz_p_array.read(mux_file, points_num)
    43254536        hz_p = array(hz_p_array, typecode=Float)
    43264537        hz_p = reshape(hz_p, (len(lon), len(lat)))
    4327         hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
     4538        hz_p = transpose(hz_p)  # mux has lat varying fastest, nc has long v.f.
    43284539
    43294540        #write time slice to nc file
    43304541        nc_file.store_timestep(hz_p)
     4542
    43314543    mux_file.close()
    43324544    nc_file.close()
    43334545
    43344546    return lonlatdep, lon, lat, depth
    4335    
    4336 
     4547
     4548
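A self-contained sketch of the fixed header layout that _binary_c2nc consumes (the helper name is hypothetical; the record sizes follow the unpack calls above):

    import array as p_array
    from struct import unpack

    def read_mux_header(file_name):
        """Read the fixed header of a URS C binary (mux) file.

        Layout, mirroring _binary_c2nc:
            int32    number of points/stations
            int32    number of time steps (nt)
            float32  time step in seconds (dt)
            float32[3 * points_num]  (long, lat, depth) per point
        """
        mux_file = open(file_name, 'rb')
        (points_num,) = unpack('i', mux_file.read(4))
        (time_step_count,) = unpack('i', mux_file.read(4))
        (time_step,) = unpack('f', mux_file.read(4))
        lonlatdep = p_array.array('f')
        lonlatdep.read(mux_file, 3 * points_num)
        mux_file.close()
        return points_num, time_step_count, time_step, lonlatdep
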
     4549##
     4550# @brief Write an NC elevation file.
     4551# @param file_out Path to the output file.
     4552# @param lon ??
     4553# @param lat ??
     4554# @param depth_vector The elevation data to write.
    43374555def write_elevation_nc(file_out, lon, lat, depth_vector):
    4338     """
    4339     Write an nc elevation file.
    4340     """
    4341    
     4556    """Write an nc elevation file."""
     4557
    43424558    # NetCDF file definition
    43434559    outfile = NetCDFFile(file_out, 'w')
     
    43454561    #Create new file
    43464562    nc_lon_lat_header(outfile, lon, lat)
    4347    
     4563
    43484564    # ELEVATION
    43494565    zname = 'ELEVATION'
    43504566    outfile.createVariable(zname, precision, (lat_name, lon_name))
    4351     outfile.variables[zname].units='CENTIMETERS'
    4352     outfile.variables[zname].missing_value=-1.e+034
    4353 
    4354     outfile.variables[lon_name][:]= ensure_numeric(lon)
    4355     outfile.variables[lat_name][:]= ensure_numeric(lat)
    4356 
    4357     depth = reshape(depth_vector, ( len(lat), len(lon)))
    4358     outfile.variables[zname][:]= depth
    4359    
     4567    outfile.variables[zname].units = 'CENTIMETERS'
     4568    outfile.variables[zname].missing_value = -1.e+034
     4569
     4570    outfile.variables[lon_name][:] = ensure_numeric(lon)
     4571    outfile.variables[lat_name][:] = ensure_numeric(lat)
     4572
     4573    depth = reshape(depth_vector, (len(lat), len(lon)))
     4574    outfile.variables[zname][:] = depth
     4575
    43604576    outfile.close()
    4361    
     4577
     4578
     4579##
     4580# @brief Write lat/lon headers to a NetCDF file.
     4581# @param outfile Handle to open file to write to.
     4582# @param lon An iterable of the longitudes.
     4583# @param lat An iterable of the latitudes.
     4584# @note Defines lat/long dimensions and variables. Sets various attributes:
     4585#          .point_spacing  and  .units
     4586#       and writes lat/lon data.
     4587
    43624588def nc_lon_lat_header(outfile, lon, lat):
    4363     """
     4589    """Write lat/lon headers to a NetCDF file.
     4590
    43644591    outfile is the netcdf file handle.
    43654592    lon - a list/array of the longitudes
    43664593    lat - a list/array of the latitudes
    43674594    """
    4368    
     4595
    43694596    outfile.institution = 'Geoscience Australia'
    43704597    outfile.description = 'Converted from URS binary C'
    4371    
     4598
    43724599    # Longitude
    43734600    outfile.createDimension(lon_name, len(lon))
    43744601    outfile.createVariable(lon_name, precision, (lon_name,))
    4375     outfile.variables[lon_name].point_spacing='uneven'
    4376     outfile.variables[lon_name].units='degrees_east'
     4602    outfile.variables[lon_name].point_spacing = 'uneven'
     4603    outfile.variables[lon_name].units = 'degrees_east'
    43774604    outfile.variables[lon_name].assignValue(lon)
    4378 
    43794605
    43804606    # Latitude
    43814607    outfile.createDimension(lat_name, len(lat))
    43824608    outfile.createVariable(lat_name, precision, (lat_name,))
    4383     outfile.variables[lat_name].point_spacing='uneven'
    4384     outfile.variables[lat_name].units='degrees_north'
     4609    outfile.variables[lat_name].point_spacing = 'uneven'
     4610    outfile.variables[lat_name].units = 'degrees_north'
    43854611    outfile.variables[lat_name].assignValue(lat)
    43864612
    43874613
    4388    
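Under the definitions above, an elevation file can be sanity-checked like this (a sketch; 'urs_e.nc' is a hypothetical file name and the lon/lat variable names come from the module-level constants lon_name and lat_name):

    from Scientific.IO.NetCDF import NetCDFFile

    nc = NetCDFFile('urs_e.nc', 'r')
    print nc.variables.keys()                       # lon, lat and ELEVATION
    print nc.variables['ELEVATION'].units           # 'CENTIMETERS'
    print nc.variables['ELEVATION'].missing_value   # -1.e+034
    nc.close()
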
     4614##
      4615# @brief Split gridded (long, lat, quantity) rows into axis vectors and a quantity vector.
      4616# @param long_lat_dep An Nx3 array of (longitude, latitude, quantity) rows.
     4617# @return A tuple (long, lat, quantity).
     4618# @note The latitude is the fastest varying dimension - in mux files.
    43894619def lon_lat2grid(long_lat_dep):
    43904620    """
     
    43974627    The latitude is the fastest varying dimension - in mux files
    43984628    """
     4629
    43994630    LONG = 0
    44004631    LAT = 1
     
    44024633
    44034634    long_lat_dep = ensure_numeric(long_lat_dep, Float)
    4404    
     4635
    44054636    num_points = long_lat_dep.shape[0]
    44064637    this_rows_long = long_lat_dep[0,LONG]
     
     44104641    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
    44114642        i += 1
     4643
     44124644    # determine the lats and longs from the grid
    4413     lat = long_lat_dep[:i, LAT]       
     4645    lat = long_lat_dep[:i, LAT]
    44144646    long = long_lat_dep[::i, LONG]
    4415    
     4647
    44164648    lenlong = len(long)
    44174649    lenlat = len(lat)
    4418     #print 'len lat', lat, len(lat)
    4419     #print 'len long', long, len(long)
    4420          
    4421     msg = 'Input data is not gridded'     
     4650
     4651    msg = 'Input data is not gridded'
    44224652    assert num_points % lenlat == 0, msg
    44234653    assert num_points % lenlong == 0, msg
    4424          
    4425     # Test that data is gridded       
     4654
     4655    # Test that data is gridded
    44264656    for i in range(lenlong):
    44274657        msg = 'Data is not gridded.  It must be for this operation'
    4428         first = i*lenlat
     4658        first = i * lenlat
    44294659        last = first + lenlat
    4430                
     4660
    44314661        assert allclose(long_lat_dep[first:last,LAT], lat), msg
    44324662        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
    4433    
    4434    
    4435 #    print 'range long', min(long), max(long)
    4436 #    print 'range lat', min(lat), max(lat)
    4437 #    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
    4438 #    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
    4439    
    4440    
    4441    
     4663
    44424664    msg = 'Out of range latitudes/longitudes'
     44434665    for l in lat: assert -90 < l < 90, msg
     
    44484670    # FIXME - make this faster/do this a better way
    44494671    # use numeric transpose, after reshaping the quantity vector
    4450 #    quantity = zeros(len(long_lat_dep), Float)
    44514672    quantity = zeros(num_points, Float)
    4452    
    4453 #    print 'num',num_points
     4673
    44544674    for lat_i, _ in enumerate(lat):
    44554675        for long_i, _ in enumerate(long):
    4456             q_index = lat_i*lenlong+long_i
    4457             lld_index = long_i*lenlat+lat_i
    4458 #            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
     4676            q_index = lat_i*lenlong + long_i
     4677            lld_index = long_i*lenlat + lat_i
    44594678            temp = long_lat_dep[lld_index, QUANTITY]
    44604679            quantity[q_index] = temp
    4461            
     4680
    44624681    return long, lat, quantity
    44634682
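To make the index remapping above concrete, a tiny worked example with two longitudes and three latitudes (values are illustrative only):

    from Numeric import array, Float

    # Rows ordered with latitude varying fastest, as in mux files.
    lld = array([[150.0, -35.0, 1.0],
                 [150.0, -34.0, 2.0],
                 [150.0, -33.0, 3.0],
                 [151.0, -35.0, 4.0],
                 [151.0, -34.0, 5.0],
                 [151.0, -33.0, 6.0]], Float)

    long, lat, q = lon_lat2grid(lld)
    # long -> [150., 151.]        lat -> [-35., -34., -33.]
    # q is longitude-fastest: [1., 4., 2., 5., 3., 6.]
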
    4464 ####  END URS 2 SWW  ###
    4465 
    4466 #### URS UNGRIDDED 2 SWW ###
     4683################################################################################
     4684# END URS 2 SWW
     4685################################################################################
     4686
     4687################################################################################
     4688# URS UNGRIDDED 2 SWW
     4689################################################################################
    44674690
    44684691### PRODUCING THE POINTS NEEDED FILE ###
     
    44754698#LONG_AMOUNT = 3600
    44764699
     4700
     4701##
      4702# @brief Write a file of the points needed by URS within a boundary polygon.
      4703# @param file_name Name of the URS file to produce.
      4704# @param boundary_polygon A list of points describing a polygon.
      4705# @param zone UTM zone of the polygon points.
      4706# @param ll_lat Lower left latitude, in decimal degrees.
      4707# @param ll_long Lower left longitude, in decimal degrees.
      4708# @param grid_spacing Grid spacing, in decimal degrees.
      4709# @param lat_amount Number of latitudes.
      4710# @param long_amount Number of longitudes.
      4711# @param isSouthernHemisphere True if the data is in the southern hemisphere.
      4712# @param export_csv If True, also export the points as a CSV file.
      4713# @param use_cache True if caching is to be used.
      4714# @param verbose True if this function is to be verbose.
      4715# @return A Geospatial_data instance containing the points.
    44774716def URS_points_needed_to_file(file_name, boundary_polygon, zone,
    44784717                              ll_lat, ll_long,
    4479                               grid_spacing, 
     4718                              grid_spacing,
    44804719                              lat_amount, long_amount,
    44814720                              isSouthernHemisphere=True,
     
    44894728    1st line is the number of points,
     44904729    each line after represents a point, in lats and longs.
    4491    
     4730
    44924731    Note: The polygon cannot cross zones or hemispheres.
    44934732
     44944733    A work-around for different zones or hemispheres is to run this twice,
    44954734    once for each zone, and then combine the output.
    4496    
     4735
    44974736    file_name - name of the urs file produced for David.
    44984737    boundary_polygon - a list of points that describes a polygon.
     
    45014740
     45024741     This is information about the URS model that needs to be input.
    4503      
     4742
    45044743    ll_lat - lower left latitude, in decimal degrees
    45054744    ll-long - lower left longitude, in decimal degrees
     45064745    grid_spacing - in decimal degrees
    4507     lat_amount - number of latitudes
    4508     long_amount- number of longs
    4509 
     4746    lat_amount - number of latitudes
      4747    long_amount - number of longs
    45104748
    45114749    Don't add the file extension.  It will be added.
    45124750    """
     4751
    45134752    geo = URS_points_needed(boundary_polygon, zone, ll_lat, ll_long,
    4514                             grid_spacing, 
     4753                            grid_spacing,
    45154754                            lat_amount, long_amount, isSouthernHemisphere,
    45164755                            use_cache, verbose)
     4756
    45174757    if not file_name[-4:] == ".urs":
    45184758        file_name += ".urs"
     4759
    45194760    geo.export_points_file(file_name, isSouthHemisphere=isSouthernHemisphere)
     4761
    45204762    if export_csv:
    45214763        if file_name[-4:] == ".urs":
    45224764            file_name = file_name[:-4] + ".csv"
    45234765        geo.export_points_file(file_name)
     4766
    45244767    return geo
    45254768
     4769
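A hedged usage sketch of the writer above (polygon, zone and grid values are illustrative only):

    # Boundary polygon in UTM metres (zone 56).
    polygon = [[398000.0, 6182000.0],
               [402000.0, 6182000.0],
               [402000.0, 6186000.0],
               [398000.0, 6186000.0]]

    geo = URS_points_needed_to_file('urs_boundary',   # writes urs_boundary.urs
                                    polygon, 56,
                                    ll_lat=-35.0, ll_long=149.0,
                                    grid_spacing=1.0/60.0,   # one arc minute
                                    lat_amount=480, long_amount=600,
                                    isSouthernHemisphere=True,
                                    export_csv=False)
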
     4770##
      4771# @brief Determine the points needed by URS within a polygon, with caching.
      4772# @param boundary_polygon A list of points describing a polygon.
      4773# @param zone UTM zone of the polygon points.
      4774# @param ll_lat Lower left latitude, in decimal degrees.
      4775# @param ll_long Lower left longitude, in decimal degrees.
      4776# @param grid_spacing Grid spacing, in decimal degrees.
      4777# @param lat_amount Number of latitudes.
      4778# @param long_amount Number of longitudes.
      4779# @param isSouthHemisphere True if the data is in the southern hemisphere.
      4780# @param use_cache True if caching of the result is desired.
      4781# @param verbose True if this function is to be verbose.
    45264782def URS_points_needed(boundary_polygon, zone, ll_lat,
    4527                       ll_long, grid_spacing, 
     4783                      ll_long, grid_spacing,
    45284784                      lat_amount, long_amount, isSouthHemisphere=True,
    45294785                      use_cache=False, verbose=False):
    45304786    args = (boundary_polygon,
    45314787            zone, ll_lat,
    4532             ll_long, grid_spacing, 
     4788            ll_long, grid_spacing,
    45334789            lat_amount, long_amount, isSouthHemisphere)
    4534     kwargs = {} 
     4790    kwargs = {}
     4791
    45354792    if use_cache is True:
    45364793        try:
    45374794            from anuga.caching import cache
    45384795        except:
    4539             msg = 'Caching was requested, but caching module'+\
      4796            msg = 'Caching was requested, but caching module ' \
    45404797                  'could not be imported'
     45414798            raise Exception, msg
    45424799
    4543 
    45444800        geo = cache(_URS_points_needed,
    4545                   args, kwargs,
    4546                   verbose=verbose,
    4547                   compression=False)
     4801                    args, kwargs,
     4802                    verbose=verbose,
     4803                    compression=False)
    45484804    else:
    45494805        geo = apply(_URS_points_needed, args, kwargs)
     
    45514807    return geo
    45524808
     4809
     4810##
      4811# @brief Determine the points needed by URS within a polygon.
      4812# @param boundary_polygon An iterable of points that describe a polygon.
      4813# @param zone UTM zone of the polygon points.
      4814# @param ll_lat Lower left latitude, in decimal degrees.
      4815# @param ll_long Lower left longitude, in decimal degrees.
      4816# @param grid_spacing Grid spacing in decimal degrees.
      4817# @param lat_amount Number of latitudes.
      4818# @param long_amount Number of longitudes.
      4819# @param isSouthHemisphere True if the data is in the southern hemisphere.
    45534820def _URS_points_needed(boundary_polygon,
    4554                       zone, ll_lat,
    4555                       ll_long, grid_spacing,
    4556                       lat_amount, long_amount,
     4821                       zone, ll_lat,
     4822                       ll_long, grid_spacing,
     4823                       lat_amount, long_amount,
    45574824                       isSouthHemisphere):
    45584825    """
     
    45634830    ll_lat - lower left latitude, in decimal degrees
    45644831    ll-long - lower left longitude, in decimal degrees
    4565     grid_spacing - in deciamal degrees
    4566 
    4567     """
    4568    
     4832    grid_spacing - in decimal degrees
     4833
     4834    """
     4835
    45694836    from sets import ImmutableSet
    4570    
     4837
    45714838    msg = "grid_spacing can not be zero"
    4572     assert not grid_spacing == 0, msg
     4839    assert not grid_spacing == 0, msg
     4840
    45734841    a = boundary_polygon
     4842
    45744843    # List of segments.  Each segment is two points.
    45754844    segs = [i and [a[i-1], a[i]] or [a[len(a)-1], a[0]] for i in range(len(a))]
     4845
    45764846    # convert the segs to Lat's and longs.
    4577    
    45784847    # Don't assume the zone of the segments is the same as the lower left
    45794848    # corner of the lat long data!!  They can easily be in different zones
    4580    
    45814849    lat_long_set = ImmutableSet()
    45824850    for seg in segs:
    4583         points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing,
    4584                       lat_amount, long_amount, zone, isSouthHemisphere)
     4851        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing,
     4852                                        lat_amount, long_amount, zone,
     4853                                        isSouthHemisphere)
    45854854        lat_long_set |= ImmutableSet(points_lat_long)
     4855
    45864856    if lat_long_set == ImmutableSet([]):
    4587         msg = """URS region specified and polygon does not overlap."""
     4857        msg = "URS region specified and polygon does not overlap."
    45884858        raise ValueError, msg
    45894859
     
    45914861    # these points.  There should be.
    45924862    geo = Geospatial_data(data_points=list(lat_long_set),
    4593                               points_are_lats_longs=True)
     4863                          points_are_lats_longs=True)
     4864
    45944865    return geo
    4595    
    4596 def points_needed(seg, ll_lat, ll_long, grid_spacing,
     4866
     4867
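The and/or expression that builds segs in _URS_points_needed above predates Python's conditional expressions; thanks to negative indexing, a plainer equivalent is (a sketch, same result for any non-empty polygon):

    def polygon_edges(a):
        """Return the closed edge list [[a[i-1], a[i]], ...] of polygon a."""
        return [[a[i-1], a[i]] for i in range(len(a))]

    # polygon_edges([p0, p1, p2]) -> [[p2, p0], [p0, p1], [p1, p2]]
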
     4868##
      4869# @brief Get the points needed to interpolate any point on a segment.
      4870# @param seg Two points in the UTM.
      4871# @param ll_lat Lower left latitude, in decimal degrees.
      4872# @param ll_long Lower left longitude, in decimal degrees.
      4873# @param grid_spacing Grid spacing, in decimal degrees.
      4874# @param lat_amount Number of latitudes.
      4875# @param long_amount Number of longitudes.
      4876# @param zone UTM zone of the segment points.
      4877# @param isSouthHemisphere True if the data is in the southern hemisphere.
     4878# @return A list of points.
     4879def points_needed(seg, ll_lat, ll_long, grid_spacing,
    45974880                  lat_amount, long_amount, zone,
    45984881                  isSouthHemisphere):
     
    46024885    interpolate any point on the segment.
    46034886    """
     4887
    46044888    from math import sqrt
    4605     #print "zone",zone
     4889
    46064890    geo_reference = Geo_reference(zone=zone)
    4607     #print "seg", seg
    4608     geo = Geospatial_data(seg,geo_reference=geo_reference)
     4891    geo = Geospatial_data(seg, geo_reference=geo_reference)
    46094892    seg_lat_long = geo.get_data_points(as_lat_long=True,
    46104893                                       isSouthHemisphere=isSouthHemisphere)
    4611     #print "seg_lat_long", seg_lat_long
     4894
    46124895    # 1.415 = 2^0.5, rounded up....
    46134896    sqrt_2_rounded_up = 1.415
    46144897    buffer = sqrt_2_rounded_up * grid_spacing
    4615    
     4898
    46164899    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
    46174900    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
     
    46194902    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer
    46204903
    4621     #print "min_long", min_long
    4622     #print "ll_long", ll_long
    4623     #print "grid_spacing", grid_spacing
    4624     first_row = (min_long - ll_long)/grid_spacing
     4904    first_row = (min_long - ll_long) / grid_spacing
     4905
    46254906    # To round up
    46264907    first_row_long = int(round(first_row + 0.5))
    4627     #print "first_row", first_row_long
    4628 
    4629     last_row = (max_long - ll_long)/grid_spacing # round down
     4908
     4909    last_row = (max_long - ll_long) / grid_spacing # round down
    46304910    last_row_long = int(round(last_row))
    4631     #print "last_row",last_row _long
    4632    
    4633     first_row = (min_lat - ll_lat)/grid_spacing
     4911
     4912    first_row = (min_lat - ll_lat) / grid_spacing
    46344913    # To round up
    46354914    first_row_lat = int(round(first_row + 0.5))
    4636     #print "first_row", first_row_lat
    4637 
    4638     last_row = (max_lat - ll_lat)/grid_spacing # round down
     4915
     4916    last_row = (max_lat - ll_lat) / grid_spacing # round down
    46394917    last_row_lat = int(round(last_row))
    4640     #print "last_row",last_row_lat
    4641 
    4642     # to work out the max distance -
    4643     # 111120 - horizontal distance between 1 deg latitude.
    4644     #max_distance = sqrt_2_rounded_up * 111120 * grid_spacing
     4918
     46454919    max_distance = 157147.4112 * grid_spacing    # ~ 2**0.5 * 111120 (m per degree of latitude)
    4646     #print "max_distance", max_distance #2619.12 m for 1 minute
    46474920    points_lat_long = []
     4921
    46484922    # Create a list of the lat long points to include.
    46494923    for index_lat in range(first_row_lat, last_row_lat + 1):
    46504924        for index_long in range(first_row_long, last_row_long + 1):
    4651             #print "index_lat", index_lat
    4652             #print "index_long", index_long
    46534925            lat = ll_lat + index_lat*grid_spacing
    46544926            long = ll_long + index_long*grid_spacing
     
    46574929            if keep_point(lat, long, seg, max_distance):
    46584930                points_lat_long.append((lat, long)) #must be hashable
    4659     #print "points_lat_long", points_lat_long
    46604931
     46614932    # Now that we have these points, let's throw ones out that are too far away
    46624933    return points_lat_long
    46634934
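A worked example of the first/last row arithmetic above (illustrative values only):

    ll_long = 150.0
    grid_spacing = 0.25
    min_long, max_long = 150.6, 151.3

    first_row_long = int(round((min_long - ll_long)/grid_spacing + 0.5))
    last_row_long = int(round((max_long - ll_long)/grid_spacing))
    assert (first_row_long, last_row_long) == (3, 5)
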
     4935
     4936##
      4937# @brief Decide whether a point is within max_distance of a segment's line.
      4938# @param lat Latitude of the point, in decimal degrees.
      4939# @param long Longitude of the point, in decimal degrees.
      4940# @param seg Two points in UTM.
      4941# @param max_distance Maximum allowed distance from the segment, in metres.
    46644942def keep_point(lat, long, seg, max_distance):
    46654943    """
    46664944    seg is two points, UTM
    46674945    """
     4946
    46684947    from math import sqrt
     4948
    46694949    _ , x0, y0 = redfearn(lat, long)
    46704950    x1 = seg[0][0]
     
    46784958            (x2_1)*(x2_1)+(y2_1)*(y2_1))
    46794959    except ZeroDivisionError:
    4680         #print "seg", seg
    4681         #print "x0", x0
    4682         #print "y0", y0
    4683         if sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1)) == 0 and \
    4684            abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1)) == 0:
     4960        if sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1)) == 0 \
     4961           and abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1)) == 0:
    46854962            return True
    46864963        else:
    46874964            return False
    4688    
    4689     if d <= max_distance:
    4690         return True
    4691     else:
    4692         return False
    4693    
    4694     #### CONVERTING UNGRIDDED URS DATA TO AN SWW FILE ####
    4695    
     4965
     4966    return d <= max_distance
     4967
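For reference, the quantity computed in the elided try block above is the standard distance from a point to the line through the segment's endpoints (a sketch of the same formula):

    from math import sqrt

    def point_to_line_distance(x0, y0, x1, y1, x2, y2):
        """Distance from (x0, y0) to the infinite line through
        (x1, y1) and (x2, y2)."""
        x2_1 = x2 - x1
        y2_1 = y2 - y1
        return abs(x2_1*(y1 - y0) - (x1 - x0)*y2_1) / \
               sqrt(x2_1*x2_1 + y2_1*y2_1)
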
     4968################################################################################
     4969# CONVERTING UNGRIDDED URS DATA TO AN SWW FILE
     4970################################################################################
     4971
    46964972WAVEHEIGHT_MUX_LABEL = '-z-mux'
     46974973EAST_VELOCITY_LABEL = '-e-mux'
    4698 NORTH_VELOCITY_LABEL =  '-n-mux'
      4974NORTH_VELOCITY_LABEL = '-n-mux'
     4975
     4976##
     4977# @brief Convert URS file(s) (wave prop) to an SWW file.
     4978# @param basename_in Stem of the input filenames.
     4979# @param basename_out Path to the output SWW file.
     4980# @param verbose True if this function is to be verbose.
      4981# @param mint Minimum time value to use.  If not supplied, full extent.
      4982# @param maxt Maximum time value to use.  If not supplied, full extent.
      4983# @param mean_stage Mean stage value to be added to the stage.
      4984# @param origin Tuple with geo-ref UTM coordinates (zone, easting, northing).
      4985# @param hole_points_UTM A point (UTM) inside a region to exclude from the mesh.
      4986# @param zscale Scale factor applied to the quantity data.
     4987# @note Also convert latitude and longitude to UTM. All coordinates are
     4988#       assumed to be given in the GDA94 datum.
     4989# @note Input filename stem has suffixes '-z-mux', '-e-mux' and '-n-mux'
     4990#       added for relative height, x-velocity and y-velocity respectively.
    46994991def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
    47004992                      mint=None, maxt=None,
     
    47034995                      hole_points_UTM=None,
    47044996                      zscale=1):
    4705     """   
     4997    """
    47064998    Convert URS C binary format for wave propagation to
    47074999    sww format native to abstract_2d_finite_volumes.
    4708 
    47095000
    47105001    Specify only basename_in and read files of the form
     
     47195010    min's and max's: If omitted - full extent is used.
     47205011    To include a value, min and max may equal it.
    4721     Lat and lon are assumed to be in decimal degrees. 
    4722    
     5012    Lat and lon are assumed to be in decimal degrees.
     5013
    47235014    origin is a 3-tuple with geo referenced
    47245015    UTM coordinates (zone, easting, northing)
     
    47275018    The mux point info is NOT relative to this origin.
    47285019
    4729 
    4730     URS C binary format has data orgainised as TIME, LONGITUDE, LATITUDE
     5020    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    47315021    which means that latitude is the fastest
    47325022    varying dimension (row major order, so to speak)
     
    47385028    function used, and the different grid structure between urs2sww
    47395029    and this function.
    4740    
     5030
    47415031    Interpolating data that has an underlying gridded source can
    47425032    easily end up with different values, depending on the underlying
     
    47505040    The grid can be
    47515041     -
    4752     |\|    A
     5042    |\|   A
    47535043     -
    47545044     or;
    4755       -
    4756      |/|   B
    47575045      -
    4758       If a point is just below the center of the midpoint, it will have a
    4759       +ve value in grid A and a -ve value in grid B.
    4760     """
     5046     |/|  B
     5047      -
     5048
     5049    If a point is just below the center of the midpoint, it will have a
     5050    +ve value in grid A and a -ve value in grid B.
     5051    """
     5052
    47615053    from anuga.mesh_engine.mesh_engine import NoTrianglesError
    47625054    from anuga.pmesh.mesh import Mesh
     
    47675059    quantities = ['HA','UA','VA']
    47685060
    4769     # instanciate urs_points of the three mux files.
     5061    # instantiate urs_points of the three mux files.
    47705062    mux = {}
    47715063    for quantity, file in map(None, quantities, files_in):
    47725064        mux[quantity] = Urs_points(file)
    4773        
     5065
    47745066    # Could check that the depth is the same. (hashing)
    47755067
    47765068    # handle to a mux file to do depth stuff
    47775069    a_mux = mux[quantities[0]]
    4778    
     5070
    47795071    # Convert to utm
    47805072    lat = a_mux.lonlatdep[:,1]
    47815073    long = a_mux.lonlatdep[:,0]
    4782     points_utm, zone = convert_from_latlon_to_utm( \
    4783         latitudes=lat, longitudes=long)
    4784     #print "points_utm", points_utm
    4785     #print "zone", zone
    4786 
    4787     elevation = a_mux.lonlatdep[:,2] * -1 #
    4788    
    4789     # grid ( create a mesh from the selected points)
     5074    points_utm, zone = convert_from_latlon_to_utm(latitudes=lat,
     5075                                                  longitudes=long)
     5076
     5077    elevation = a_mux.lonlatdep[:,2] * -1
     5078
     5079    # grid (create a mesh from the selected points)
     47905080    # This mesh has a problem.  Triangles are stretched over ungridded areas.
    4791     #  If these areas could be described as holes in pmesh, that would be great
     5081    # If these areas could be described as holes in pmesh, that would be great.
    47925082
     47935083    # I can't just get the user to select a point in the middle.
     
    47985088    mesh.add_vertices(points_utm)
    47995089    mesh.auto_segment(smooth_indents=True, expand_pinch=True)
     5090
    48005091    # To try and avoid alpha shape 'hugging' too much
    4801     mesh.auto_segment( mesh.shape.get_alpha()*1.1 )
     5092    mesh.auto_segment(mesh.shape.get_alpha() * 1.1)
    48025093    if hole_points_UTM is not None:
    48035094        point = ensure_absolute(hole_points_UTM)
    48045095        mesh.add_hole(point[0], point[1])
     5096
    48055097    try:
    48065098        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
    48075099    except NoTrianglesError:
    4808         # This is a bit of a hack, going in and changing the
    4809         # data structure.
     5100        # This is a bit of a hack, going in and changing the data structure.
    48105101        mesh.holes = []
    48115102        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
     5103
    48125104    mesh_dic = mesh.Mesh2MeshList()
    48135105
    48145106    #mesh.export_mesh_file(basename_in + '_168.tsh')
    4815     #import sys; sys.exit() 
     5107    #import sys; sys.exit()
    48165108    # These are the times of the mux file
    48175109    mux_times = []
    48185110    for i in range(a_mux.time_step_count):
    4819         mux_times.append(a_mux.time_step * i) 
    4820     mux_times_start_i, mux_times_fin_i = mux2sww_time(mux_times, mint, maxt)
     5111        mux_times.append(a_mux.time_step * i)
     5112    (mux_times_start_i, mux_times_fin_i) = mux2sww_time(mux_times, mint, maxt)
    48215113    times = mux_times[mux_times_start_i:mux_times_fin_i]
    4822    
     5114
    48235115    if mux_times_start_i == mux_times_fin_i:
    48245116        # Close the mux files
    48255117        for quantity, file in map(None, quantities, files_in):
    48265118            mux[quantity].close()
    4827         msg="Due to mint and maxt there's no time info in the boundary SWW."
     5119        msg = "Due to mint and maxt there's no time info in the boundary SWW."
    48285120        raise Exception, msg
    4829        
     5121
    48305122    # If this raise is removed there is currently no downstream errors
    4831            
     5123
     48325124    points_utm = ensure_numeric(points_utm)
    4833     assert ensure_numeric(mesh_dic['generatedpointlist']) == \
    4834            ensure_numeric(points_utm)
    4835    
     5125    assert ensure_numeric(mesh_dic['generatedpointlist']) \
     5126           == ensure_numeric(points_utm)
     5127
    48365128    volumes = mesh_dic['generatedtrianglelist']
    4837    
    4838     # write sww intro and grid stuff.   
     5129
     5130    # write sww intro and grid stuff.
    48395131    if basename_out is None:
    48405132        swwname = basename_in + '.sww'
     
    48435135
    48445136    if verbose: print 'Output to ', swwname
     5137
    48455138    outfile = NetCDFFile(swwname, 'w')
     5139
    48465140    # For a different way of doing this, check out tsh2sww
    48475141    # work out sww_times and the index range this covers
    48485142    sww = Write_sww()
    48495143    sww.store_header(outfile, times, len(volumes), len(points_utm),
    4850                      verbose=verbose,sww_precision=Float)
     5144                     verbose=verbose, sww_precision=Float)
    48515145    outfile.mean_stage = mean_stage
    48525146    outfile.zscale = zscale
     
    48555149                            elevation, zone,  new_origin=origin,
    48565150                            verbose=verbose)
    4857    
     5151
    48585152    if verbose: print 'Converting quantities'
     5153
     5154    # Read in a time slice from each mux file and write it to the SWW file
    48595155    j = 0
    4860     # Read in a time slice from each mux file and write it to the sww file
    48615156    for ha, ua, va in map(None, mux['HA'], mux['UA'], mux['VA']):
    48625157        if j >= mux_times_start_i and j < mux_times_fin_i:
     
    48645159            h = stage - elevation
    48655160            xmomentum = ua*h
    4866             ymomentum = -1*va*h # -1 since in mux files south is positive.
    4867             sww.store_quantities(outfile, 
    4868                                  slice_index=j - mux_times_start_i,
     5161            ymomentum = -1 * va * h # -1 since in mux files south is positive.
     5162            sww.store_quantities(outfile,
     5163                                 slice_index=j-mux_times_start_i,
    48695164                                 verbose=verbose,
    48705165                                 stage=stage,
     
    48735168                                 sww_precision=Float)
    48745169        j += 1
     5170
    48755171    if verbose: sww.verbose_quantities(outfile)
     5172
    48765173    outfile.close()
    4877     #
     5174
    48785175    # Do some conversions while writing the sww file
    48795176
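A hedged usage sketch of the ungridded converter above (the stem 'o' mirrors the default; all values are illustrative):

    # Reads o-z-mux, o-e-mux and o-n-mux, meshes the station positions
    # and writes o.sww.
    urs_ungridded2sww('o',
                      mint=0.0, maxt=3600.0,
                      mean_stage=0.0,
                      zscale=1,
                      verbose=False)
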
    4880     ##################################
    4881     # READ MUX2 FILES line of points #
    4882     ##################################
     5177
     5178################################################################################
     5179# READ MUX2 FILES line of points
     5180################################################################################
    48835181
    48845182WAVEHEIGHT_MUX2_LABEL = '-z-mux2'
    4885 EAST_VELOCITY_MUX2_LABEL =  '-e-mux2'
    4886 NORTH_VELOCITY_MUX2_LABEL =  '-n-mux2'   
    4887 
     5183EAST_VELOCITY_MUX2_LABEL = '-e-mux2'
     5184NORTH_VELOCITY_MUX2_LABEL = '-n-mux2'
     5185
     5186##
      5187# @brief Read mux2 files and combine them as a weighted linear sum.
     5188# @param filenames List of mux2 format input filenames.
     5189# @param weights Weights associated with each source file.
     5190# @param permutation The gauge numbers for which data is extracted.
     5191# @param verbose True if this function is to be verbose.
     5192# @return (times, latitudes, longitudes, elevation, quantity, starttime)
    48885193def read_mux2_py(filenames,
    48895194                 weights=None,
    48905195                 permutation=None,
    48915196                 verbose=False):
    4892     """Access the mux files specified in the filenames list. Combine the
    4893        data found therin as a weighted linear sum as specifed by the weights.
    4894        If permutation is None or empty extract timeseries data for all gauges within the
    4895        files.
    4896          
    4897        Input:
    4898            filenames:   List of filenames specifiying the file containing the
    4899                         timeseries data (mux2 format) for each source
    4900            weights:     Weighs associated with each source (defaults to 1 for each source)
    4901            permutation: Specifies the gauge numbers that for which data is to be
    4902                         extracted
    4903     """
     5197    """Access the mux files specified in the filenames list. Combine the
      5198       data found therein as a weighted linear sum as specified by the weights.
     5199       If permutation is None or empty extract timeseries data for all gauges
     5200       within the files.
     5201
     5202       Input:
      5203           filenames:   List of filenames specifying the file containing the
     5204                        timeseries data (mux2 format) for each source
      5205           weights:     Weights associated with each source
     5206                        (defaults to 1 for each source)
      5207           permutation: Specifies the gauge numbers for which data is to be
     5208                        extracted
     5209    """
    49045210
    49055211    from Numeric import ones,Float,compress,zeros,arange
    49065212    from urs_ext import read_mux2
    49075213
    4908     numSrc=len(filenames)
    4909    
    4910     file_params=-1*ones(3,Float) #[nsta,dt,nt]
    4911    
     5214    numSrc = len(filenames)
     5215
     5216    file_params = -1 * ones(3, Float)                    # [nsta,dt,nt]
     5217
    49125218    # Convert verbose to int C flag
    49135219    if verbose:
     
    49185224    if weights is None:
    49195225        weights = ones(numSrc)
    4920        
     5226
    49215227    if permutation is None:
    4922         permutation = ensure_numeric([], Float)   
    4923 
    4924     #print 'filenames', filenames
    4925     #print 'weights', weights
    4926                
    4927     # Call underlying C implementation urs2sts_ext.c