source: anuga_core/source/anuga/shallow_water/data_manager.py @ 5403

Last change on this file was revision 5403, checked in by ole, 16 years ago

Added tolerances for point_on_line and changed interpolate_polyline to use a general relative tolerance.

[2852]1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: NetCDF format for storing model output f(t,x,y)
11.tms: NetCDF format for storing time series f(t)
12
[4663]13.csv: ASCII format for storing arbitrary points and associated attributes
[2852]14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdini's ASCII geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
[3535]33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
[2852]34NC:           Model outputs for use as boundary conditions (e.g. from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEMs to native pts file
40
41NC -> SWW: Conversion of MOST boundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
[3560]47TSH + Boundary SWW -> SWW: Simulation using abstract_2d_finite_volumes
[2852]48
49"""
50
51import exceptions
[3292]52class TitleValueError(exceptions.Exception): pass
[2852]53class DataMissingValuesError(exceptions.Exception): pass
54class DataFileNotOpenError(exceptions.Exception): pass
55class DataTimeError(exceptions.Exception): pass
56class DataDomainError(exceptions.Exception): pass
[4455]57class NewQuantity(exceptions.Exception): pass
[2852]58
59
60
[3292]61import csv
[4598]62import os, sys
[4500]63import shutil
[3720]64from struct import unpack
65import array as p_array
[4271]66#import time, os
[4595]67from os import sep, path, remove, mkdir, access, F_OK, W_OK, getcwd
[2852]68
[4595]69
[4868]70from Numeric import concatenate, array, Float, Int, Int32, resize, \
71     sometrue, searchsorted, zeros, allclose, around, reshape, \
72     transpose, sort, NewAxis, ArrayType, compress, take, arange, \
73     argmax, alltrue, shape, Float32, size
[4595]74
75import string
76
[3720]77from Scientific.IO.NetCDF import NetCDFFile
[4500]78#from shutil import copy
[4595]79from os.path import exists, basename, join
[4636]80from os import getcwd
[2852]81
82
[4271]83from anuga.coordinate_transforms.redfearn import redfearn, \
84     convert_from_latlon_to_utm
[4387]85from anuga.coordinate_transforms.geo_reference import Geo_reference, \
[4455]86     write_NetCDF_georeference, ensure_geo_reference
[4382]87from anuga.geospatial_data.geospatial_data import Geospatial_data,\
88     ensure_absolute
[3642]89from anuga.config import minimum_storable_height as default_minimum_storable_height
[4699]90from anuga.config import max_float
[4271]91from anuga.utilities.numerical_tools import ensure_numeric,  mean
[3720]92from anuga.caching.caching import myhash
93from anuga.utilities.anuga_exceptions import ANUGAError
[4271]94from anuga.shallow_water import Domain
95from anuga.abstract_2d_finite_volumes.pmesh2domain import \
96     pmesh_to_domain_instance
[4480]97from anuga.abstract_2d_finite_volumes.util import get_revision_number, \
[4567]98     remove_lone_verts, sww2timeseries, get_centroid_values
[4497]99from anuga.load_mesh.loadASCII import export_mesh_file
[5226]100from anuga.utilities.polygon import intersection
101
102
[4462]103# formula mappings
104
105quantity_formula = {'momentum':'(xmomentum**2 + ymomentum**2)**0.5',
106                    'depth':'stage-elevation',
107                    'speed': \
108 '(xmomentum**2 + ymomentum**2)**0.5/(stage-elevation+1.e-6/(stage-elevation))'}
109
110
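
#
# Sketch of how one of these formula strings can be evaluated, assuming each
# named quantity is available as a Numeric array.  (The sww2dem machinery,
# not shown in this excerpt, does essentially this with arrays read from an
# sww file.)
#
def _example_quantity_formula():
    from Numeric import array, Float

    namespace = {'stage':     array([1.0, 2.0, 0.5], Float),
                 'elevation': array([0.0, 1.5, 0.5], Float),
                 'xmomentum': array([0.1, 0.2, 0.0], Float),
                 'ymomentum': array([0.0, 0.1, 0.0], Float)}

    depth = eval(quantity_formula['depth'], namespace)
    print 'depth =', depth    # roughly [1.0, 0.5, 0.0]
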
[4271]111   
[2852]112def make_filename(s):
[4612]113    """Transform argument string into a Sexsuitable filename
[2852]114    """
115
116    s = s.strip()
117    s = s.replace(' ', '_')
118    s = s.replace('(', '')
119    s = s.replace(')', '')
120    s = s.replace('__', '_')
121
122    return s
123
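
# Quick usage sketch for make_filename: surrounding whitespace is stripped,
# spaces become underscores and parentheses are dropped.
def _example_make_filename():
    assert make_filename('  My Run (test 1)  ') == 'My_Run_test_1'
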
124
125def check_dir(path, verbose=None):
126    """Check that specified path exists.
127    If path does not exist it will be created if possible
128
129    USAGE:
130       check_dir(path, verbose)
131
132    ARGUMENTS:
133        path -- Directory
134        verbose -- Flag verbose output (default: None)
135
136    RETURN VALUE:
137        Verified path including trailing separator
138
139    """
140
141    import os.path
142
143    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
144        unix = 0
145    else:
146        unix = 1
147
148
149    if path[-1] != os.sep:
150        path = path + os.sep  # Add separator for directories
151
152    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
153    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
154        try:
155            exitcode=os.mkdir(path)
156
157            # Change access rights if possible
158            #
159            if unix:
160                exitcode=os.system('chmod 775 '+path)
161            else:
162                pass  # FIXME: What about access rights under Windows?
163
164            if verbose: print 'MESSAGE: Directory', path, 'created.'
165
166        except:
167            print 'WARNING: Directory', path, 'could not be created.'
168            if unix:
169                path = '/tmp/'
170            else:
171                path = 'C:'
172
173            print 'Using directory %s instead' %path
174
175    return(path)
176
177
178
179def del_dir(path):
180    """Recursively delete directory path and all its contents
181    """
182
183    import os
184
185    if os.path.isdir(path):
186        for file in os.listdir(path):
187            X = os.path.join(path, file)
188
189
190            if os.path.isdir(X) and not os.path.islink(X):
191                del_dir(X)
192            else:
193                try:
194                    os.remove(X)
195                except:
196                    print "Could not remove file %s" %X
197
198        os.rmdir(path)
[4598]199       
200       
201# ANOTHER OPTION, IF NEEDED IN THE FUTURE, Nick B 7/2007
[4603]202def rmgeneric(path, __func__,verbose=False):
203    ERROR_STR= """Error removing %(path)s, %(error)s """
[2852]204
[4603]205    try:
206        __func__(path)
207        if verbose: print 'Removed ', path
208    except OSError, (errno, strerror):
209        print ERROR_STR % {'path' : path, 'error': strerror }
210           
211def removeall(path,verbose=False):
[2852]212
[4603]213    if not os.path.isdir(path):
214        return
215   
216    files=os.listdir(path)
[2852]217
[4603]218    for x in files:
219        fullpath=os.path.join(path, x)
220        if os.path.isfile(fullpath):
221            f=os.remove
222            rmgeneric(fullpath, f)
223        elif os.path.isdir(fullpath):
224            removeall(fullpath)
225            f=os.rmdir
226            rmgeneric(fullpath, f,verbose)
227
228
229
[2852]230def create_filename(datadir, filename, format, size=None, time=None):
231
232    import os
[3514]233    #from anuga.config import data_dir
[2852]234
235    FN = check_dir(datadir) + filename
236
237    if size is not None:
238        FN += '_size%d' %size
239
240    if time is not None:
241        FN += '_time%.2f' %time
242
243    FN += '.' + format
244    return FN
245
246
247def get_files(datadir, filename, format, size):
[4595]248    """Get all file (names) with given name, size and format
[2852]249    """
250
251    import glob
252
253    import os
[3514]254    #from anuga.config import data_dir
[2852]255
256    dir = check_dir(datadir)
257
258    pattern = dir + os.sep + filename + '_size=%d*.%s' %(size, format)
259    return glob.glob(pattern)
260
261
262
263#Generic class for storing output to e.g. visualisation or checkpointing
264class Data_format:
265    """Generic interface to data formats
266    """
267
268
269    def __init__(self, domain, extension, mode = 'w'):
270        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
271                                        '''   'w' (write)'''+\
272                                        '''   'r' (read)''' +\
273                                        '''   'a' (append)'''
274
275        #Create filename
276        self.filename = create_filename(domain.get_datadir(),
[3928]277                                        domain.get_name(), extension)
[2852]278
279        #print 'F', self.filename
280        self.timestep = 0
281        self.domain = domain
[3928]282       
[2852]283
284
[3928]285        # Exclude ghosts in case this is a parallel domain
286        self.number_of_nodes = domain.number_of_full_nodes       
287        self.number_of_volumes = domain.number_of_full_triangles
288        #self.number_of_volumes = len(domain)       
289
290
291
292
[2852]293        #FIXME: Should we have a general set_precision function?
294
295
296
297#Class for storing output to e.g. visualisation
298class Data_format_sww(Data_format):
299    """Interface to native NetCDF format (.sww) for storing model output
300
301    There are two kinds of data
302
303    1: Constant data: Vertex coordinates and field values. Stored once
304    2: Variable data: Conserved quantities. Stored once per timestep.
305
306    All data is assumed to reside at vertex locations.
307    """
308
309
310    def __init__(self, domain, mode = 'w',\
311                 max_size = 2000000000,
312                 recursion = False):
313        from Scientific.IO.NetCDF import NetCDFFile
314        from Numeric import Int, Float, Float32
315
316        self.precision = Float32 #Use single precision for quantities
317        if hasattr(domain, 'max_size'):
318            self.max_size = domain.max_size #file size max is 2Gig
319        else:
320            self.max_size = max_size
321        self.recursion = recursion
322        self.mode = mode
323
324        Data_format.__init__(self, domain, 'sww', mode)
325
[3642]326        if hasattr(domain, 'minimum_storable_height'):
[4704]327            self.minimum_storable_height = domain.minimum_storable_height
[3529]328        else:
[3642]329            self.minimum_storable_height = default_minimum_storable_height
[2852]330
331        # NetCDF file definition
332        fid = NetCDFFile(self.filename, mode)
333
334        if mode == 'w':
[4455]335            description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'
336            self.writer = Write_sww()
[4704]337            self.writer.store_header(fid,
338                                     domain.starttime,
339                                     self.number_of_volumes,
340                                     self.domain.number_of_full_nodes,
341                                     description=description,
342                                     smoothing=domain.smooth,
[4862]343                                     order=domain.default_order,
344                                     sww_precision=self.precision)
[4704]345
346            # Extra optional information
[4455]347            if hasattr(domain, 'texture'):
[4704]348                fid.texture = domain.texture
[2852]349
[4704]350            if domain.quantities_to_be_monitored is not None:
351                fid.createDimension('singleton', 1)
[4705]352                fid.createDimension('two', 2)               
353
354                poly = domain.monitor_polygon
355                if poly is not None:
356                    N = len(poly)
357                    fid.createDimension('polygon_length', N)
[4706]358                    fid.createVariable('extrema.polygon',
[4705]359                                       self.precision,
360                                       ('polygon_length',
361                                        'two'))
[4706]362                    fid.variables['extrema.polygon'][:] = poly                                   
363
[4705]364                   
365                interval = domain.monitor_time_interval
366                if interval is not None:
[4706]367                    fid.createVariable('extrema.time_interval',
[4705]368                                       self.precision,
369                                       ('two',))
[4706]370                    fid.variables['extrema.time_interval'][:] = interval
[4705]371
372               
[4704]373                for q in domain.quantities_to_be_monitored:
374                    #print 'doing', q
[4706]375                    fid.createVariable(q+'.extrema', self.precision,
[4704]376                                       ('numbers_in_range',))
[4706]377                    fid.createVariable(q+'.min_location', self.precision,
[4704]378                                       ('numbers_in_range',))
[4706]379                    fid.createVariable(q+'.max_location', self.precision,
[4704]380                                       ('numbers_in_range',))
[4706]381                    fid.createVariable(q+'.min_time', self.precision,
[4704]382                                       ('singleton',))
[4706]383                    fid.createVariable(q+'.max_time', self.precision,
[4704]384                                       ('singleton',))
[2852]385
[4704]386                   
[2852]387        fid.close()
388
389
390    def store_connectivity(self):
391        """Specialisation of store_connectivity for net CDF format
392
393        Writes x,y,z coordinates of triangles constituting
394        the bed elevation.
395        """
396
397        from Scientific.IO.NetCDF import NetCDFFile
398
399        from Numeric import concatenate, Int
400
401        domain = self.domain
402
403        #Get NetCDF
404        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
405
406        # Get the variables
407        x = fid.variables['x']
408        y = fid.variables['y']
409        z = fid.variables['elevation']
410
411        volumes = fid.variables['volumes']
412
413        # Get X, Y and bed elevation Z
414        Q = domain.quantities['elevation']
415        X,Y,Z,V = Q.get_vertex_values(xy=True,
[3926]416                                      precision=self.precision)
[2852]417
[4455]418        #
419        points = concatenate( (X[:,NewAxis],Y[:,NewAxis]), axis=1 )
[4704]420        self.writer.store_triangulation(fid,
421                                        points,
422                                        V.astype(volumes.typecode()),
423                                        Z,
424                                        points_georeference= \
425                                        domain.geo_reference)
[2852]426
[4704]427        # Close
428        fid.close()
[2852]429
430
[4868]431    def store_timestep(self, names=None):
[2852]432        """Store time and named quantities to file
433        """
[4868]434       
[2852]435        from Scientific.IO.NetCDF import NetCDFFile
436        import types
437        from time import sleep
438        from os import stat
439
440        from Numeric import choose
[4868]441
442
443        if names is None:
444            # Standard shallow water wave equation quantitites in ANUGA
445            names = ['stage', 'xmomentum', 'ymomentum']
[4455]446       
[4704]447        # Get NetCDF       
[2852]448        retries = 0
449        file_open = False
450        while not file_open and retries < 10:
451            try:
[4704]452                fid = NetCDFFile(self.filename, 'a') # Open existing file
[2852]453            except IOError:
[4704]454                # This could happen if someone was reading the file.
455                # In that case, wait a while and try again
[2852]456                msg = 'Warning (store_timestep): File %s could not be opened'\
457                      %self.filename
458                msg += ' - trying step %s again' %self.domain.time
459                print msg
460                retries += 1
461                sleep(1)
462            else:
463                file_open = True
464
465        if not file_open:
466            msg = 'File %s could not be opened for append' %self.filename
467            raise DataFileNotOpenError, msg
468
469
470
[4704]471        # Check to see if the file is already too big:
[2852]472        time = fid.variables['time']
473        i = len(time)+1
474        file_size = stat(self.filename)[6]
475        file_size_increase =  file_size/i
476        if file_size + file_size_increase > self.max_size*(2**self.recursion):
[4704]477            # In order to get the file name and start time correct,
478            # I change the domain.filename and domain.starttime.
479            # This is the only way to do this without changing
480            # other modules (I think).
[2852]481
[4704]482            # Write a filename addon that won't break swollens reader
483            # (10.sww is bad)
[2852]484            filename_ext = '_time_%s'%self.domain.time
485            filename_ext = filename_ext.replace('.', '_')
[4704]486           
487            # Remember the old filename, then give domain a
488            # name with the extension
[3846]489            old_domain_filename = self.domain.get_name()
[2852]490            if not self.recursion:
[3846]491                self.domain.set_name(old_domain_filename+filename_ext)
[2852]492
[3846]493
[4704]494            # Change the domain starttime to the current time
[2852]495            old_domain_starttime = self.domain.starttime
496            self.domain.starttime = self.domain.time
497
[4704]498            # Build a new data_structure.
[2852]499            next_data_structure=\
500                Data_format_sww(self.domain, mode=self.mode,\
501                                max_size = self.max_size,\
502                                recursion = self.recursion+1)
503            if not self.recursion:
504                print '    file_size = %s'%file_size
505                print '    saving file to %s'%next_data_structure.filename
506            #set up the new data_structure
507            self.domain.writer = next_data_structure
508
509            #FIXME - could be cleaner to use domain.store_timestep etc.
510            next_data_structure.store_connectivity()
511            next_data_structure.store_timestep(names)
512            fid.sync()
513            fid.close()
514
515            #restore the old starttime and filename
516            self.domain.starttime = old_domain_starttime
[3846]517            self.domain.set_name(old_domain_filename)           
[2852]518        else:
519            self.recursion = False
520            domain = self.domain
521
522            # Get the variables
523            time = fid.variables['time']
524            stage = fid.variables['stage']
525            xmomentum = fid.variables['xmomentum']
526            ymomentum = fid.variables['ymomentum']
527            i = len(time)
528            if type(names) not in [types.ListType, types.TupleType]:
529                names = [names]
530
[4455]531            if 'stage' in names and 'xmomentum' in names and \
[4704]532               'ymomentum' in names:
[4455]533
[4868]534                # Get stage, elevation, depth and select only those
535                # values where minimum_storable_height is exceeded
[4455]536                Q = domain.quantities['stage']
[4868]537                A, _ = Q.get_vertex_values(xy = False,
538                                           precision = self.precision)
[4455]539                z = fid.variables['elevation']
[4868]540
541                storable_indices = A-z[:] >= self.minimum_storable_height
542                stage = choose(storable_indices, (z[:], A))
[4455]543               
[4868]544                # Define a zero vector of same size and type as A
545                # for use with momenta
546                null = zeros(size(A), A.typecode())
547               
548                # Get xmomentum where depth exceeds minimum_storable_height
[4455]549                Q = domain.quantities['xmomentum']
[4868]550                xmom, _ = Q.get_vertex_values(xy = False,
551                                              precision = self.precision)
552                xmomentum = choose(storable_indices, (null, xmom))
[4455]553               
[4868]554
555                # Get ymomentum where depth exceeds minimum_storable_height
[4455]556                Q = domain.quantities['ymomentum']
[4868]557                ymom, _ = Q.get_vertex_values(xy = False,
558                                              precision = self.precision)
559                ymomentum = choose(storable_indices, (null, ymom))               
[4704]560               
561                # Write quantities to NetCDF
562                self.writer.store_quantities(fid, 
563                                             time=self.domain.time,
[4862]564                                             sww_precision=self.precision,
[4704]565                                             stage=stage,
566                                             xmomentum=xmomentum,
567                                             ymomentum=ymomentum)
[4455]568            else:
[4868]569                msg = 'Quantities stored must be: stage, xmomentum, ymomentum.'
570                msg += ' Instead I got: ' + str(names)
571                raise Exception, msg
572           
[2852]573
[3946]574
[4704]575            # Update extrema if requested
576            domain = self.domain
577            if domain.quantities_to_be_monitored is not None:
578                for q, info in domain.quantities_to_be_monitored.items():
579
580                    if info['min'] is not None:
[4706]581                        fid.variables[q + '.extrema'][0] = info['min']
582                        fid.variables[q + '.min_location'][:] =\
[4704]583                                        info['min_location']
[4706]584                        fid.variables[q + '.min_time'][0] = info['min_time']
[4704]585                       
586                    if info['max'] is not None:
[4706]587                        fid.variables[q + '.extrema'][1] = info['max']
588                        fid.variables[q + '.max_location'][:] =\
[4704]589                                        info['max_location']
[4706]590                        fid.variables[q + '.max_time'][0] = info['max_time']
[4704]591
592           
593
[4868]594            # Flush and close
[2852]595            fid.sync()
596            fid.close()
597
598
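
# Sketch of how the sww writer above is typically driven, assuming 'domain' is
# an anuga.shallow_water Domain that has already been set up elsewhere (mesh,
# initial quantities, set_name and set_datadir) and that domain.format is
# 'sww'.  In a normal simulation the Domain object makes equivalent calls
# itself while evolving.
def _example_store_sww(domain):
    writer = get_dataobject(domain, mode='w')   # Data_format_sww instance

    # Constant data: mesh coordinates, connectivity and bed elevation
    writer.store_connectivity()

    # Variable data: one record of the standard quantities at domain.time
    writer.store_timestep(['stage', 'xmomentum', 'ymomentum'])
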
599
[4704]600# Class for handling checkpoints data
[2852]601class Data_format_cpt(Data_format):
602    """Interface to native NetCDF format (.cpt)
603    """
604
605
606    def __init__(self, domain, mode = 'w'):
607        from Scientific.IO.NetCDF import NetCDFFile
608        from Numeric import Int, Float
609
610        self.precision = Float #Use full precision
611
612        Data_format.__init__(self, domain, 'cpt', mode)
613
614
615        # NetCDF file definition
616        fid = NetCDFFile(self.filename, mode)
617
618        if mode == 'w':
619            #Create new file
620            fid.institution = 'Geoscience Australia'
621            fid.description = 'Checkpoint data'
622            #fid.smooth = domain.smooth
623            fid.order = domain.default_order
624
625            # dimension definitions
626            fid.createDimension('number_of_volumes', self.number_of_volumes)
627            fid.createDimension('number_of_vertices', 3)
628
629            #Store info at all vertices (no smoothing)
630            fid.createDimension('number_of_points', 3*self.number_of_volumes)
631            fid.createDimension('number_of_timesteps', None) #extensible
632
633            # variable definitions
634
635            #Mesh
636            fid.createVariable('x', self.precision, ('number_of_points',))
637            fid.createVariable('y', self.precision, ('number_of_points',))
638
639
640            fid.createVariable('volumes', Int, ('number_of_volumes',
641                                                'number_of_vertices'))
642
643            fid.createVariable('time', self.precision,
644                               ('number_of_timesteps',))
645
646            #Allocate space for all quantities
647            for name in domain.quantities.keys():
648                fid.createVariable(name, self.precision,
649                                   ('number_of_timesteps',
650                                    'number_of_points'))
651
652        #Close
653        fid.close()
654
655
656    def store_checkpoint(self):
657        """
658        Write x,y coordinates of triangles,
659        their connectivity (volumes),
660        and the bed elevation.
662        """
663
664        from Scientific.IO.NetCDF import NetCDFFile
665
666        from Numeric import concatenate
667
668        domain = self.domain
669
670        #Get NetCDF
671        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
672
673        # Get the variables
674        x = fid.variables['x']
675        y = fid.variables['y']
676        z = fid.variables['elevation'] #FIXME: created per-timestep in __init__, not static
677        volumes = fid.variables['volumes']
678
679        # Get X, Y and bed elevation Z
680        Q = domain.quantities['elevation']
681        X,Y,Z,V = Q.get_vertex_values(xy=True,
682                      precision = self.precision)
683
684
685
686        x[:] = X.astype(self.precision)
687        y[:] = Y.astype(self.precision)
688        z[:] = Z.astype(self.precision)
689
690        volumes[:] = V
691
692        #Close
693        fid.close()
694
695
696    def store_timestep(self, name):
697        """Store time and named quantity to file
698        """
699        from Scientific.IO.NetCDF import NetCDFFile
700        from time import sleep
701
702        #Get NetCDF
703        retries = 0
704        file_open = False
705        while not file_open and retries < 10:
706            try:
707                fid = NetCDFFile(self.filename, 'a') #Open existing file
708            except IOError:
709                #This could happen if someone was reading the file.
710                #In that case, wait a while and try again
711                msg = 'Warning (store_timestep): File %s could not be opened'\
712                  %self.filename
713                msg += ' - trying again'
714                print msg
715                retries += 1
716                sleep(1)
717            else:
718                file_open = True
719
720        if not file_open:
721            msg = 'File %s could not be opened for append' %self.filename
722            raise DataFileNotOpenError, msg
723
724
725        domain = self.domain
726
727        # Get the variables
728        time = fid.variables['time']
729        stage = fid.variables['stage']
730        i = len(time)
731
732        #Store stage
733        time[i] = self.domain.time
734
735        # Get quantity
736        Q = domain.quantities[name]
737        A,V = Q.get_vertex_values(xy=False,
[4704]738                                  precision = self.precision)
[2852]739
740        stage[i,:] = A.astype(self.precision)
741
742        #Flush and close
743        fid.sync()
744        fid.close()
745
746
[4326]747#### NED is national exposure database (name changed to NEXIS)
[3292]748   
749LAT_TITLE = 'LATITUDE'
750LONG_TITLE = 'LONGITUDE'
[3336]751X_TITLE = 'x'
752Y_TITLE = 'y'
[3292]753class Exposure_csv:
754    def __init__(self,file_name, latitude_title=LAT_TITLE,
[3398]755                 longitude_title=LONG_TITLE, is_x_y_locations=None,
[3336]756                 x_title=X_TITLE, y_title=Y_TITLE,
[4326]757                 refine_polygon=None, title_check_list=None):
[3292]758        """
[3296]759        This class is for handling the exposure csv file.
760        It reads the file in and converts the lats and longs to a geospatial
761        data object.
762        Use the methods to read and write columns.
763
764        The format of the csv files it reads is:
765           The first row is a title row.
766           commas are the delimiters
767           each column is a 'set' of data
768
769        Feel free to use/expand it to read other csv files.
770           
771           
772        It is not for adding and deleting rows
773       
[3292]774        Can geospatial handle string attributes? It's not made for them.
775        Currently it can't load and save string att's.
776
777        So just use geospatial to hold the x, y and georef? Bad, since
778        different att's are in different structures.  Not so bad, the info
779        to write if the .csv file is saved is in attribute_dic
780
781        The location info is in the geospatial attribute.
782       
783       
784        """
785        self._file_name = file_name
786        self._geospatial = None #
787
788        # self._attribute_dic is a dictionary.
789        #The keys are the column titles.
790        #The values are lists of column data
791       
792        # self._title_index_dic is a dictionary.
793        #The keys are the column titles.
794        #The values are the index positions of file columns.
795        self._attribute_dic, self._title_index_dic = \
[4612]796            csv2dict(self._file_name, title_check_list=title_check_list)
[3292]797        try:
[4059]798            #Have code here that handles caps or lower
[3292]799            lats = self._attribute_dic[latitude_title]
800            longs = self._attribute_dic[longitude_title]
801           
802        except KeyError:
803            # maybe a warning..
[3398]804            #Let's see if this works..
805            if False != is_x_y_locations:
806                is_x_y_locations = True
[3292]807            pass
808        else:
809            self._geospatial = Geospatial_data(latitudes = lats,
810                 longitudes = longs)
[3336]811
[3398]812        if is_x_y_locations is True:
[3336]813            if self._geospatial is not None:
814                pass #fixme throw an error
815            try:
816                xs = self._attribute_dic[x_title]
817                ys = self._attribute_dic[y_title]
818                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
819            except KeyError:
820                # maybe a warning..
[3664]821                msg = "Could not find location information."
[3336]822                raise TitleValueError, msg
823            else:
824                self._geospatial = Geospatial_data(data_points=points)
[3292]825           
826        # create a list of points that are in the refining_polygon
827        # described by a list of indexes representing the points
828
829    def __cmp__(self, other):
830        #print "self._attribute_dic",self._attribute_dic
831        #print "other._attribute_dic",other._attribute_dic
832        #print "self._title_index_dic", self._title_index_dic
833        #print "other._title_index_dic", other._title_index_dic
834       
835        #check that a is an instance of this class
836        if isinstance(self, type(other)):
837            result = cmp(self._attribute_dic, other._attribute_dic)
838            if result <>0:
839                return result
840            # The order of the columns is important. Therefore..
841            result = cmp(self._title_index_dic, other._title_index_dic)
842            if result <>0:
843                return result
844            for self_ls, other_ls in map(None,self._attribute_dic, \
845                    other._attribute_dic):
846                result = cmp(self._attribute_dic[self_ls],
847                             other._attribute_dic[other_ls])
848                if result <>0:
849                    return result
850            return 0
851        else:
852            return 1
853   
854
855    def get_column(self, column_name, use_refind_polygon=False):
856        """
857        Given a column name return a list of the column values
858
859        Note, the type of the values will be String!
[3437]860        do this to change a list of strings to a list of floats
861        time = [float(x) for x in time]
[3292]862       
863        Not implemented:
864        if use_refind_polygon is True, only return values in the
865        refined polygon
866        """
867        if not self._attribute_dic.has_key(column_name):
868            msg = 'There is no column called %s!' %column_name
869            raise TitleValueError, msg
870        return self._attribute_dic[column_name]
871
[3437]872
873    def get_value(self, value_column_name,
874                  known_column_name,
875                  known_values,
876                  use_refind_polygon=False):
877        """
878        Do linear interpolation on the known_column_name column, using
879        known_values, to return a value of the value_column_name column.
880        """
881        pass
882
883
[3292]884    def get_location(self, use_refind_polygon=False):
885        """
886        Return a geospatial object which describes the
887        locations of the location file.
888
889        Note, if there is no location info, this returns None.
890       
891        Not implemented:
892        if use_refind_polygon is True, only return values in the
893        refined polygon
894        """
895        return self._geospatial
896
897    def set_column(self, column_name, column_values, overwrite=False):
898        """
899        Add a column to the 'end' (with the right most column being the end)
900        of the csv file.
901
902        Set overwrite to True if you want to overwrite a column.
903       
904        Note, in column_name white space is removed and case is not checked.
905        Precondition
906        The column_name and column_values cannot have comma's in it.
907        """
908       
909        value_row_count = \
910            len(self._attribute_dic[self._title_index_dic.keys()[0]])
911        if len(column_values) <> value_row_count: 
912            msg = 'The number of column values must equal the number of rows.'
913            raise DataMissingValuesError, msg
914       
915        if self._attribute_dic.has_key(column_name):
916            if not overwrite:
917                msg = 'Column name %s already in use!' %column_name
918                raise TitleValueError, msg
919        else:
920            # New title.  Add it to the title index.
921            self._title_index_dic[column_name] = len(self._title_index_dic)
922        self._attribute_dic[column_name] = column_values
923        #print "self._title_index_dic[column_name]",self._title_index_dic
924
925    def save(self, file_name=None):
926        """
927        Save the exposure csv file
928        """
929        if file_name is None:
930            file_name = self._file_name
931       
932        fd = open(file_name,'wb')
933        writer = csv.writer(fd)
934       
935        #Write the title to a csv file
936        line = [None]* len(self._title_index_dic)
937        for title in self._title_index_dic.iterkeys():
938            line[self._title_index_dic[title]]= title
939        writer.writerow(line)
940       
941        # Write the values to a csv file
942        value_row_count = \
943            len(self._attribute_dic[self._title_index_dic.keys()[0]])
944        for row_i in range(value_row_count):
945            line = [None]* len(self._title_index_dic)
946            for title in self._title_index_dic.iterkeys():
947                line[self._title_index_dic[title]]= \
948                     self._attribute_dic[title][row_i]
949            writer.writerow(line)
950
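
# Usage sketch for Exposure_csv.  By default locations are read from LATITUDE
# and LONGITUDE columns; here a made-up file with x/y columns is used instead
# via is_x_y_locations, since that path needs no coordinate conversion.  The
# WALLS and DEPTH columns are purely illustrative.
def _example_exposure_csv():
    fd = open('example_exposure.csv', 'w')
    fd.write('x, y, WALLS\n')
    fd.write('500000.0, 7600000.0, brick\n')
    fd.write('500100.0, 7600050.0, fibro\n')
    fd.close()

    exposure = Exposure_csv('example_exposure.csv', is_x_y_locations=True)

    walls = exposure.get_column('WALLS')    # values come back as strings
    geo = exposure.get_location()           # Geospatial_data holding the x,y points

    # Add a new column and write the amended file back out
    exposure.set_column('DEPTH', ['0.0', '0.3'])
    exposure.save('example_exposure_out.csv')
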
951
[4612]952def csv2dict(file_name, title_check_list=None):
953    """
954    Load in the csv as a dic, title as key and column info as value.
955    Also, create a dic, title as key and column index as value,
956    to keep track of the column order.
[4775]957
958    Two dictionaries are returned.
[4612]959   
960    WARNING: Values are returned as strings.
961    do this to change a list of strings to a list of floats
962        time = [float(x) for x in time]
[4775]963
964       
[4612]965    """
966   
967    #
968    attribute_dic = {}
969    title_index_dic = {}
970    titles_stripped = [] # list of titles
971    reader = csv.reader(file(file_name))
972
973    # Read in and manipulate the title info
974    titles = reader.next()
975    for i,title in enumerate(titles):
976        titles_stripped.append(title.strip())
977        title_index_dic[title.strip()] = i
978    title_count = len(titles_stripped)       
979    #print "title_index_dic",title_index_dic
980    if title_check_list is not None:
981        for title_check in title_check_list:
982            #msg = "Reading error.  This row is not present ", title_check
983            #assert title_index_dic.has_key(title_check), msg
984            if not title_index_dic.has_key(title_check):
985                #reader.close()
986                msg = "Reading error. This title is not present: %s" \
987                      %title_check
988                raise IOError, msg
989               
990   
991   
992    #create a dic of column values, indexed by column title
993    for line in reader:
994        if len(line) <> title_count:
995            raise IOError #FIXME make this nicer
996        for i, value in enumerate(line):
997            attribute_dic.setdefault(titles_stripped[i],[]).append(value)
998       
999    return attribute_dic, title_index_dic
1000
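
# Minimal sketch of csv2dict on an assumed file 'gauges.csv' whose first row
# is 'time, stage'.  As warned above, the values come back as strings.
def _example_csv2dict():
    attribute_dic, title_index_dic = csv2dict('gauges.csv',
                                              title_check_list=['time', 'stage'])

    time = [float(x) for x in attribute_dic['time']]
    stage = [float(x) for x in attribute_dic['stage']]
    print 'Read %d rows, column order: %s' %(len(time), title_index_dic)
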
1001
[2852]1002#Auxiliary
1003def write_obj(filename,x,y,z):
1004    """Store x,y,z vectors into filename (obj format)
1005       Vectors are assumed to have dimension (M,3) where
1006       M corresponds to the number of elements.
1007       triangles are assumed to be disconnected
1008
1009       The three numbers in each vector correspond to three vertices,
1010
1011       e.g. the x coordinate of vertex 1 of element i is in x[i,1]
1012
1013    """
1014    #print 'Writing obj to %s' % filename
1015
1016    import os.path
1017
1018    root, ext = os.path.splitext(filename)
1019    if ext == '.obj':
1020        FN = filename
1021    else:
1022        FN = filename + '.obj'
1023
1024
1025    outfile = open(FN, 'wb')
1026    outfile.write("# Triangulation as an obj file\n")
1027
1028    M, N = x.shape
1029    assert N==3  #Assuming three vertices per element
1030
1031    for i in range(M):
1032        for j in range(N):
1033            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))
1034
1035    for i in range(M):
1036        base = i*N
1037        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))
1038
1039    outfile.close()
1040
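
# Usage sketch for write_obj with two disconnected triangles: x, y and z are
# (M,3) Numeric arrays, one row per triangle and one column per vertex.
def _example_write_obj():
    from Numeric import array, Float

    x = array([[0.0, 1.0, 0.0],
               [1.0, 1.0, 0.0]], Float)
    y = array([[0.0, 0.0, 1.0],
               [0.0, 1.0, 1.0]], Float)
    z = array([[0.0, 0.5, 0.2],
               [0.5, 0.7, 0.2]], Float)

    write_obj('example_triangles', x, y, z)   # writes example_triangles.obj
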
1041
1042#########################################################
1043#Conversion routines
1044########################################################
1045
1046def sww2obj(basefilename, size):
1047    """Convert netcdf based data output to obj
1048    """
1049    from Scientific.IO.NetCDF import NetCDFFile
1050
1051    from Numeric import Float, zeros
1052
1053    #Get NetCDF
1054    FN = create_filename('.', basefilename, 'sww', size)
1055    print 'Reading from ', FN
1056    fid = NetCDFFile(FN, 'r')  #Open existing file for read
1057
1058
1059    # Get the variables
1060    x = fid.variables['x']
1061    y = fid.variables['y']
1062    z = fid.variables['elevation']
1063    time = fid.variables['time']
1064    stage = fid.variables['stage']
1065
1066    M = size  #Number of lines
1067    xx = zeros((M,3), Float)
1068    yy = zeros((M,3), Float)
1069    zz = zeros((M,3), Float)
1070
1071    for i in range(M):
1072        for j in range(3):
1073            xx[i,j] = x[i+j*M]
1074            yy[i,j] = y[i+j*M]
1075            zz[i,j] = z[i+j*M]
1076
1077    #Write obj for bathymetry
1078    FN = create_filename('.', basefilename, 'obj', size)
1079    write_obj(FN,xx,yy,zz)
1080
1081
1082    #Now read all the data with variable information, combine with
1083    #x,y info and store as obj
1084
1085    for k in range(len(time)):
1086        t = time[k]
1087        print 'Processing timestep %f' %t
1088
1089        for i in range(M):
1090            for j in range(3):
1091                zz[i,j] = stage[k,i+j*M]
1092
1093
1094        #Write obj for variable data
1095        #FN = create_filename(basefilename, 'obj', size, time=t)
1096        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
1097        write_obj(FN,xx,yy,zz)
1098
1099
1100def dat2obj(basefilename):
1101    """Convert line based data output to obj
1102    FIXME: Obsolete?
1103    """
1104
1105    import glob, os
[3514]1106    from anuga.config import data_dir
[2852]1107
1108
1109    #Get bathymetry and x,y's
1110    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()
1111
1112    from Numeric import zeros, Float
1113
1114    M = len(lines)  #Number of lines
1115    x = zeros((M,3), Float)
1116    y = zeros((M,3), Float)
1117    z = zeros((M,3), Float)
1118
1119    ##i = 0
1120    for i, line in enumerate(lines):
1121        tokens = line.split()
1122        values = map(float,tokens)
1123
1124        for j in range(3):
1125            x[i,j] = values[j*3]
1126            y[i,j] = values[j*3+1]
1127            z[i,j] = values[j*3+2]
1128
1129        ##i += 1
1130
1131
1132    #Write obj for bathymetry
1133    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)
1134
1135
1136    #Now read all the data files with variable information, combine with
1137    #x,y info
1138    #and store as obj
1139
1140    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')
1141
1142    for filename in files:
1143        print 'Processing %s' % filename
1144
1145        lines = open(data_dir+os.sep+filename,'r').readlines()
1146        assert len(lines) == M
1147        root, ext = os.path.splitext(filename)
1148
1149        #Get time from filename
1150        i0 = filename.find('_time=')
1151        if i0 == -1:
1152            #Skip bathymetry file
1153            continue
1154
1155        i0 += 6  #Position where time starts
1156        i1 = filename.find('.dat')
1157
1158        if i1 > i0:
1159            t = float(filename[i0:i1])
1160        else:
1161            raise DataTimeError, 'Hmmmm'
1162
1163
1164
1165        ##i = 0
1166        for i, line in enumerate(lines):
1167            tokens = line.split()
1168            values = map(float,tokens)
1169
1170            for j in range(3):
1171                z[i,j] = values[j]
1172
1173            ##i += 1
1174
1175        #Write obj for variable data
1176        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
1177
1178
1179def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
1180    """Read netcdf filename1, pick timesteps first:step:last and save to
1181    netcdf file filename2
1182    """
1183    from Scientific.IO.NetCDF import NetCDFFile
1184
1185    #Get NetCDF
1186    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
1187    outfile = NetCDFFile(filename2, 'w')  #Open new file
1188
1189
1190    #Copy dimensions
1191    for d in infile.dimensions:
1192        outfile.createDimension(d, infile.dimensions[d])
1193
1194    for name in infile.variables:
1195        var = infile.variables[name]
1196        outfile.createVariable(name, var.typecode(), var.dimensions)
1197
1198
1199    #Copy the static variables
1200    for name in infile.variables:
1201        if name == 'time' or name == 'stage':
1202            pass
1203        else:
1204            #Copy
1205            outfile.variables[name][:] = infile.variables[name][:]
1206
1207    #Copy selected timesteps
1208    time = infile.variables['time']
1209    stage = infile.variables['stage']
1210
1211    newtime = outfile.variables['time']
1212    newstage = outfile.variables['stage']
1213
1214    if last is None:
1215        last = len(time)
1216
1217    selection = range(first, last, step)
1218    for i, j in enumerate(selection):
1219        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
1220        newtime[i] = time[j]
1221        newstage[i,:] = stage[j,:]
1222
1223    #Close
1224    infile.close()
1225    outfile.close()
1226
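
# Usage sketch for filter_netcdf: keep every second timestep of an existing
# sww file (file names are illustrative).  Note that only 'time' and 'stage'
# are subsampled; all other variables are copied verbatim.
def _example_filter_netcdf():
    filter_netcdf('full_run.sww', 'decimated_run.sww', first=0, step=2)
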
1227
1228#Get data objects
1229def get_dataobject(domain, mode='w'):
1230    """Return instance of class of given format using filename
1231    """
1232
1233    cls = eval('Data_format_%s' %domain.format)
1234    return cls(domain, mode)
1235
1236
1237
1238
1239def dem2pts(basename_in, basename_out=None,
1240            easting_min=None, easting_max=None,
1241            northing_min=None, northing_max=None,
1242            use_cache=False, verbose=False,):
1243    """Read Digitial Elevation model from the following NetCDF format (.dem)
1244
1245    Example:
1246
1247    ncols         3121
1248    nrows         1800
1249    xllcorner     722000
1250    yllcorner     5893000
1251    cellsize      25
1252    NODATA_value  -9999
1253    138.3698 137.4194 136.5062 135.5558 ..........
1254
1255    Convert to NetCDF pts format which is
1256
1257    points:  (Nx2) Float array
1258    elevation: N Float array
1259    """
1260
1261
1262
1263    kwargs = {'basename_out': basename_out,
1264              'easting_min': easting_min,
1265              'easting_max': easting_max,
1266              'northing_min': northing_min,
1267              'northing_max': northing_max,
1268              'verbose': verbose}
1269
1270    if use_cache is True:
1271        from caching import cache
1272        result = cache(_dem2pts, basename_in, kwargs,
1273                       dependencies = [basename_in + '.dem'],
1274                       verbose = verbose)
1275
1276    else:
1277        result = apply(_dem2pts, [basename_in], kwargs)
1278
1279    return result
1280
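
# Sketch of a clipped DEM -> PTS conversion followed by reading the result
# back through Geospatial_data to inspect what was stored.  The extents match
# the example header in the docstring above; file names are illustrative.
def _example_dem2pts_clipped():
    dem2pts('topo', basename_out='topo_clipped',
            easting_min=722000, easting_max=730000,
            northing_min=5893000, northing_max=5900000,
            use_cache=True, verbose=True)

    pts = Geospatial_data(file_name='topo_clipped.pts')
    print 'Stored %d points' %len(pts.get_data_points())
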
1281
1282def _dem2pts(basename_in, basename_out=None, verbose=False,
1283            easting_min=None, easting_max=None,
1284            northing_min=None, northing_max=None):
1285    """Read Digitial Elevation model from the following NetCDF format (.dem)
1286
1287    Internal function. See public function dem2pts for details.
1288    """
1289
[4776]1290    # FIXME: Can this be written feasibly using write_pts?
[2852]1291
1292    import os
1293    from Scientific.IO.NetCDF import NetCDFFile
1294    from Numeric import Float, zeros, reshape, sum
1295
1296    root = basename_in
1297
[4776]1298    # Get NetCDF
1299    infile = NetCDFFile(root + '.dem', 'r')  # Open existing netcdf file for read
[2852]1300
1301    if verbose: print 'Reading DEM from %s' %(root + '.dem')
1302
1303    ncols = infile.ncols[0]
1304    nrows = infile.nrows[0]
[4776]1305    xllcorner = infile.xllcorner[0]  # Easting of lower left corner
1306    yllcorner = infile.yllcorner[0]  # Northing of lower left corner
[2852]1307    cellsize = infile.cellsize[0]
1308    NODATA_value = infile.NODATA_value[0]
1309    dem_elevation = infile.variables['elevation']
1310
1311    zone = infile.zone[0]
1312    false_easting = infile.false_easting[0]
1313    false_northing = infile.false_northing[0]
1314
[4776]1315    # Text strings
[2852]1316    projection = infile.projection
1317    datum = infile.datum
1318    units = infile.units
1319
1320
[4776]1321    # Get output file
[2852]1322    if basename_out == None:
1323        ptsname = root + '.pts'
1324    else:
1325        ptsname = basename_out + '.pts'
1326
1327    if verbose: print 'Store to NetCDF file %s' %ptsname
1328    # NetCDF file definition
1329    outfile = NetCDFFile(ptsname, 'w')
1330
[4776]1331    # Create new file
[2852]1332    outfile.institution = 'Geoscience Australia'
1333    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
1334                      'of spatial point data'
[4776]1335    # Assign default values
[2852]1336    if easting_min is None: easting_min = xllcorner
1337    if easting_max is None: easting_max = xllcorner + ncols*cellsize
1338    if northing_min is None: northing_min = yllcorner
1339    if northing_max is None: northing_max = yllcorner + nrows*cellsize
1340
[4776]1341    # Compute offsets to update georeferencing
[2852]1342    easting_offset = xllcorner - easting_min
1343    northing_offset = yllcorner - northing_min
1344
[4776]1345    # Georeferencing
[2852]1346    outfile.zone = zone
[4776]1347    outfile.xllcorner = easting_min # Easting of lower left corner
1348    outfile.yllcorner = northing_min # Northing of lower left corner
[2852]1349    outfile.false_easting = false_easting
1350    outfile.false_northing = false_northing
1351
1352    outfile.projection = projection
1353    outfile.datum = datum
1354    outfile.units = units
1355
1356
[4776]1357    # Grid info (FIXME: probably not going to be used, but heck)
[2852]1358    outfile.ncols = ncols
1359    outfile.nrows = nrows
1360
1361    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
1362    totalnopoints = nrows*ncols
1363
[4776]1364    # Calculating number of NODATA_values for each row in clipped region
1365    # FIXME: use array operations to do faster
[2852]1366    nn = 0
1367    k = 0
1368    i1_0 = 0
1369    j1_0 = 0
1370    thisj = 0
1371    thisi = 0
1372    for i in range(nrows):
1373        y = (nrows-i-1)*cellsize + yllcorner
1374        for j in range(ncols):
1375            x = j*cellsize + xllcorner
1376            if easting_min <= x <= easting_max and \
1377               northing_min <= y <= northing_max:
1378                thisj = j
1379                thisi = i
1380                if dem_elevation_r[i,j] == NODATA_value: nn += 1
1381
1382                if k == 0:
1383                    i1_0 = i
1384                    j1_0 = j
1385                k += 1
1386
1387    index1 = j1_0
1388    index2 = thisj
1389
[4776]1390    # Dimension definitions
[2852]1391    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
1392    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
1393
1394    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
1395    nopoints = clippednopoints-nn
1396
1397    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]
1398
[3664]1399    if verbose:
[2852]1400        print 'There are %d values in the elevation' %totalnopoints
1401        print 'There are %d values in the clipped elevation' %clippednopoints
1402        print 'There are %d NODATA_values in the clipped elevation' %nn
1403
1404    outfile.createDimension('number_of_points', nopoints)
1405    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1406
[4776]1407    # Variable definitions
[2852]1408    outfile.createVariable('points', Float, ('number_of_points',
1409                                             'number_of_dimensions'))
1410    outfile.createVariable('elevation', Float, ('number_of_points',))
1411
1412    # Get handles to the variables
1413    points = outfile.variables['points']
1414    elevation = outfile.variables['elevation']
1415
1416    lenv = index2-index1+1
[4776]1417    # Store data
[2852]1418    global_index = 0
[4776]1419    # for i in range(nrows):
[2852]1420    for i in range(i1_0,thisi+1,1):
1421        if verbose and i%((nrows+10)/10)==0:
1422            print 'Processing row %d of %d' %(i, nrows)
1423
1424        lower_index = global_index
1425
1426        v = dem_elevation_r[i,index1:index2+1]
1427        no_NODATA = sum(v == NODATA_value)
1428        if no_NODATA > 0:
[4776]1429            newcols = lenv - no_NODATA # ncols_in_bounding_box - no_NODATA
[2852]1430        else:
[4776]1431            newcols = lenv # ncols_in_bounding_box
[2852]1432
1433        telev = zeros(newcols, Float)
1434        tpoints = zeros((newcols, 2), Float)
1435
1436        local_index = 0
1437
1438        y = (nrows-i-1)*cellsize + yllcorner
1439        #for j in range(ncols):
1440        for j in range(j1_0,index2+1,1):
1441
1442            x = j*cellsize + xllcorner
1443            if easting_min <= x <= easting_max and \
1444               northing_min <= y <= northing_max and \
1445               dem_elevation_r[i,j] <> NODATA_value:
1446                tpoints[local_index, :] = [x-easting_min,y-northing_min]
1447                telev[local_index] = dem_elevation_r[i, j]
1448                global_index += 1
1449                local_index += 1
1450
1451        upper_index = global_index
1452
1453        if upper_index == lower_index + newcols:
1454            points[lower_index:upper_index, :] = tpoints
1455            elevation[lower_index:upper_index] = telev
1456
1457    assert global_index == nopoints, 'index not equal to number of points'
1458
1459    infile.close()
1460    outfile.close()
1461
1462
1463
1464def _read_hecras_cross_sections(lines):
1465    """Return block of surface lines for each cross section
1466    Starts with SURFACE LINE,
1467    Ends with END CROSS-SECTION
1468    """
1469
1470    points = []
1471
1472    reading_surface = False
1473    for i, line in enumerate(lines):
1474
1475        if len(line.strip()) == 0:    #Ignore blanks
1476            continue
1477
1478        if lines[i].strip().startswith('SURFACE LINE'):
1479            reading_surface = True
1480            continue
1481
1482        if lines[i].strip().startswith('END') and reading_surface:
1483            yield points
1484            reading_surface = False
1485            points = []
1486
1487        if reading_surface:
1488            fields = line.strip().split(',')
1489            easting = float(fields[0])
1490            northing = float(fields[1])
1491            elevation = float(fields[2])
1492            points.append([easting, northing, elevation])
1493
1494
1495
1496
1497def hecras_cross_sections2pts(basename_in,
1498                              basename_out=None,
1499                              verbose=False):
1500    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1501
1502    Example:
1503
1504
1505# RAS export file created on Mon 15Aug2005 11:42
1506# by HEC-RAS Version 3.1.1
1507
1508BEGIN HEADER:
1509  UNITS: METRIC
1510  DTM TYPE: TIN
1511  DTM: v:\1\cit\perth_topo\river_tin
1512  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1513  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1514  MAP PROJECTION: UTM
1515  PROJECTION ZONE: 50
1516  DATUM: AGD66
1517  VERTICAL DATUM:
1518  NUMBER OF REACHES:  19
1519  NUMBER OF CROSS-SECTIONS:  14206
1520END HEADER:
1521
1522
1523Only the SURFACE LINE data of the following form will be utilised
1524
1525  CROSS-SECTION:
1526    STREAM ID:Southern-Wungong
1527    REACH ID:Southern-Wungong
1528    STATION:19040.*
1529    CUT LINE:
1530      405548.671603161 , 6438142.7594925
1531      405734.536092045 , 6438326.10404912
1532      405745.130459356 , 6438331.48627354
1533      405813.89633823 , 6438368.6272789
1534    SURFACE LINE:
1535     405548.67,   6438142.76,   35.37
1536     405552.24,   6438146.28,   35.41
1537     405554.78,   6438148.78,   35.44
1538     405555.80,   6438149.79,   35.44
1539     405559.37,   6438153.31,   35.45
1540     405560.88,   6438154.81,   35.44
1541     405562.93,   6438156.83,   35.42
1542     405566.50,   6438160.35,   35.38
1543     405566.99,   6438160.83,   35.37
1544     ...
1545   END CROSS-SECTION
1546
1547    Convert to NetCDF pts format which is
1548
1549    points:  (Nx2) Float array
1550    elevation: N Float array
1551    """
1552
1553    import os
1554    from Scientific.IO.NetCDF import NetCDFFile
1555    from Numeric import Float, zeros, reshape
1556
1557    root = basename_in
1558
1559    #Get ASCII file
1560    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1561
1562    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1563
1564    lines = infile.readlines()
1565    infile.close()
1566
1567    if verbose: print 'Converting to pts format'
1568
1569    i = 0
1570    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1571        i += 1
1572
1573    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1574    i += 1
1575
1576    assert lines[i].strip().upper().startswith('UNITS:')
1577    units = lines[i].strip().split()[1]
1578    i += 1
1579
1580    assert lines[i].strip().upper().startswith('DTM TYPE:')
1581    i += 1
1582
1583    assert lines[i].strip().upper().startswith('DTM:')
1584    i += 1
1585
1586    assert lines[i].strip().upper().startswith('STREAM')
1587    i += 1
1588
1589    assert lines[i].strip().upper().startswith('CROSS')
1590    i += 1
1591
1592    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1593    projection = lines[i].strip().split(':')[1]
1594    i += 1
1595
1596    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1597    zone = int(lines[i].strip().split(':')[1])
1598    i += 1
1599
1600    assert lines[i].strip().upper().startswith('DATUM:')
1601    datum = lines[i].strip().split(':')[1]
1602    i += 1
1603
1604    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1605    i += 1
1606
1607    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1608    i += 1
1609
1610    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1611    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1612    i += 1
1613
1614
1615    #Now read all points
1616    points = []
1617    elevation = []
1618    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1619        for k, entry in enumerate(entries):
1620            points.append(entry[:2])
1621            elevation.append(entry[2])
1622
1623
1624    msg = 'Actual number of cross sections == %d, reported as %d'\
1625          %(j+1, number_of_cross_sections)
1626    assert j+1 == number_of_cross_sections, msg
1627
1628    #Get output file
1629    if basename_out is None:
1630        ptsname = root + '.pts'
1631    else:
1632        ptsname = basename_out + '.pts'
1633
[4455]1634    geo_ref = Geo_reference(zone, 0, 0, datum, projection, units)
1635    geo = Geospatial_data(points, {"elevation":elevation},
1636                          verbose=verbose, geo_reference=geo_ref)
1637    geo.export_points_file(ptsname)
[2852]1638
[4462]1639def export_grid(basename_in, extra_name_out = None,
1640                quantities = None, # defaults to elevation
1641                timestep = None,
1642                reduction = None,
1643                cellsize = 10,
1644                NODATA_value = -9999,
1645                easting_min = None,
1646                easting_max = None,
1647                northing_min = None,
1648                northing_max = None,
1649                verbose = False,
1650                origin = None,
1651                datum = 'WGS84',
1652                format = 'ers'):
1653    """
1654   
1655    Wrapper for sww2dem - see sww2dem for an explanation of most of the
1656    parameters.
[2852]1657
[4462]1658    quantities is a list of quantities.  Each quantity will be
1659    calculated for each sww file.
1660
1661    This function returns the basenames of the files produced, i.e.
1662    the directory plus each file name without its extension.
[4535]1665
1666    One output file is produced for each input sww file and each quantity.
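
    A hedged example (file names are hypothetical): convert every sww file
    matching the 'cairns' basename in the current directory to ERS grids of
    elevation and stage at 25 m resolution:

        from anuga.shallow_water.data_manager import export_grid

        files = export_grid('cairns',
                            quantities=['elevation', 'stage'],
                            cellsize=25,
                            format='ers')
        # 'files' holds one basename per (sww file, quantity) combination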
[4462]1667    """
1668   
1669    if quantities is None:
1670        quantities = ['elevation']
1671       
1672    if type(quantities) is str:
1673            quantities = [quantities]
1674
1675    # How many sww files are there?
1676    dir, base = os.path.split(basename_in)
[4489]1677    #print "basename_in",basename_in
1678    #print "base",base
[4586]1679
1680    iterate_over = get_all_swwfiles(dir,base,verbose)
[4463]1681   
[4526]1682    if dir == "":
1683        dir = "." # Unix compatibility
[4462]1684   
1685    files_out = []
[4586]1686    #print 'sww_file',iterate_over
[4548]1687    for sww_file in iterate_over:
[4462]1688        for quantity in quantities:
1689            if extra_name_out is None:
1690                basename_out = sww_file + '_' + quantity
1691            else:
1692                basename_out = sww_file + '_' + quantity + '_' \
1693                               + extra_name_out
[4524]1694#            print "basename_out", basename_out
[4462]1695       
[4524]1696            file_out = sww2dem(dir+sep+sww_file, dir+sep+basename_out,
[4462]1697                               quantity, 
1698                               timestep,
1699                               reduction,
1700                               cellsize,
1701                               NODATA_value,
1702                               easting_min,
1703                               easting_max,
1704                               northing_min,
1705                               northing_max,
1706                               verbose,
1707                               origin,
1708                               datum,
1709                               format)
1710            files_out.append(file_out)
1711    #print "basenames_out after",basenames_out
1712    return files_out
[4545]1713
1714
1715def get_timeseries(production_dirs, output_dir, scenario_name, gauges_dir_name,
1716                   plot_quantity, generate_fig = False,
1717                   reportname = None, surface = False, time_min = None,
1718                   time_max = None, title_on = False, verbose = True,
1719                   nodes=None):
1720    """
1721    nodes - number of processes used.
1722
1723    Warning: this function has no tests.
1724    """
1725    if reportname is None:
1726        report = False
1727    else:
1728        report = True
1729       
1730    if nodes is None:
1731        is_parallel = False
1732    else:
1733        is_parallel = True
1734       
1735    # Generate figures
1736    swwfiles = {}
[4462]1737   
[4545]1738    if is_parallel is True:   
1739        for i in range(nodes):
1740            print 'Sending node %d of %d' %(i,nodes)
1741            swwfiles = {}
1742            if reportname is not None:
1743                reportname = reportname + '_%s' %(i)
1744            for label_id in production_dirs.keys():
1745                if label_id == 'boundaries':
1746                    swwfile = best_boundary_sww
1747                else:
1748                    file_loc = output_dir + label_id + sep
1749                    sww_extra = '_P%s_%s' %(i,nodes)
1750                    swwfile = file_loc + scenario_name + sww_extra + '.sww'
1751                    print 'swwfile',swwfile
1752                    swwfiles[swwfile] = label_id
1753
1754                texname, elev_output = sww2timeseries(swwfiles,
1755                                              gauges_dir_name,
1756                                              production_dirs,
1757                                              report = report,
1758                                              reportname = reportname,
1759                                              plot_quantity = plot_quantity,
1760                                              generate_fig = generate_fig,
1761                                              surface = surface,
1762                                              time_min = time_min,
1763                                              time_max = time_max,
1764                                              title_on = title_on,
1765                                              verbose = verbose)
1766    else:   
1767        for label_id in production_dirs.keys():       
1768            if label_id == 'boundaries':
1769                print 'boundaries'
1770                file_loc = project.boundaries_in_dir
1771                swwfile = project.boundaries_dir_name3 + '.sww'
1772                #  swwfile = boundary_dir_filename
1773            else:
1774                file_loc = output_dir + label_id + sep
1775                swwfile = file_loc + scenario_name + '.sww'
1776            swwfiles[swwfile] = label_id
1777       
1778        texname, elev_output = sww2timeseries(swwfiles,
1779                                              gauges_dir_name,
1780                                              production_dirs,
1781                                              report = report,
1782                                              reportname = reportname,
1783                                              plot_quantity = plot_quantity,
1784                                              generate_fig = generate_fig,
1785                                              surface = surface,
1786                                              time_min = time_min,
1787                                              time_max = time_max,
1788                                              title_on = title_on,
1789                                              verbose = verbose)
1790                                         
1791
1792   
[2852]1793def sww2dem(basename_in, basename_out = None,
[4462]1794            quantity = None, # defaults to elevation
[2852]1795            timestep = None,
1796            reduction = None,
1797            cellsize = 10,
1798            NODATA_value = -9999,
1799            easting_min = None,
1800            easting_max = None,
1801            northing_min = None,
1802            northing_max = None,
1803            verbose = False,
1804            origin = None,
1805            datum = 'WGS84',
[4462]1806            format = 'ers'):
[2852]1807
[4663]1808    """Read SWW file and convert to Digital Elevation model format
1809    (.asc or .ers)
[2852]1810
1811    Example (ASC):
1812
1813    ncols         3121
1814    nrows         1800
1815    xllcorner     722000
1816    yllcorner     5893000
1817    cellsize      25
1818    NODATA_value  -9999
1819    138.3698 137.4194 136.5062 135.5558 ..........
1820
1821    For the 'asc' format, also write an accompanying file with same basename_out
1822    but extension .prj used to fix the UTM zone, datum, false northings and eastings.
1823
1824    The prj format is assumed to be as
1825
1826    Projection    UTM
1827    Zone          56
1828    Datum         WGS84
1829    Zunits        NO
1830    Units         METERS
1831    Spheroid      WGS84
1832    Xshift        0.0000000000
1833    Yshift        10000000.0000000000
1834    Parameters
1835
1836
1837    The parameter quantity must be the name of an existing quantity or
1838    an expression involving existing quantities. The default is
[4462]1839    'elevation'. Quantity is not a list of quantities.
[2852]1840
1841    if timestep (an index) is given, output quantity at that timestep
1842
1843    if reduction is given use that to reduce quantity over all timesteps.
1844
1845    datum
1846
1847    format can be either 'asc' or 'ers'
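
    A sketch of typical use (the 'cairns' basename is hypothetical):

        from anuga.shallow_water.data_manager import sww2dem

        sww2dem('cairns',
                basename_out='cairns_depth',
                quantity='stage-elevation',  # expression of stored quantities
                reduction=max,               # maximum over all timesteps
                cellsize=25,
                format='asc')
        # Writes cairns_depth.asc and the accompanying cairns_depth.prj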
1848    """
1849
1850    import sys
[4663]1851    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, \
1852         sometrue
[2852]1853    from Numeric import array2string
1854
[4663]1855    from anuga.utilities.polygon import inside_polygon, outside_polygon, \
1856         separate_points_by_polygon
1857    from anuga.abstract_2d_finite_volumes.util import \
1858         apply_expression_to_dictionary
[2852]1859
1860    msg = 'Format must be either asc or ers'
1861    assert format.lower() in ['asc', 'ers'], msg
1862
1863
1864    false_easting = 500000
1865    false_northing = 10000000
1866
1867    if quantity is None:
1868        quantity = 'elevation'
[4462]1869       
[2852]1870    if reduction is None:
1871        reduction = max
1872
1873    if basename_out is None:
1874        basename_out = basename_in + '_%s' %quantity
1875
[4462]1876    if quantity_formula.has_key(quantity):
1877        quantity = quantity_formula[quantity]
1878       
[2852]1879    swwfile = basename_in + '.sww'
1880    demfile = basename_out + '.' + format
1881    # Note the use of a .ers extension is optional (write_ermapper_grid will
1882    # deal with either option)
[4413]1883   
[4548]1884    #if verbose: bye= nsuadsfd[0] # uncomment to check catching verbose errors
[4413]1885   
[4551]1886    # Read sww file
[4524]1887    if verbose: 
1888        print 'Reading from %s' %swwfile
1889        print 'Output directory is %s' %basename_out
1890   
[2852]1891    from Scientific.IO.NetCDF import NetCDFFile
1892    fid = NetCDFFile(swwfile)
1893
1894    #Get extent and reference
1895    x = fid.variables['x'][:]
1896    y = fid.variables['y'][:]
1897    volumes = fid.variables['volumes'][:]
[3961]1898    if timestep is not None:
1899        times = fid.variables['time'][timestep]
[3967]1900    else:
1901        times = fid.variables['time'][:]
[2852]1902
1903    number_of_timesteps = fid.dimensions['number_of_timesteps']
1904    number_of_points = fid.dimensions['number_of_points']
[3967]1905   
[2852]1906    if origin is None:
1907
[4551]1908        # Get geo_reference
1909        # sww files don't have to have a geo_ref
[2852]1910        try:
1911            geo_reference = Geo_reference(NetCDFObject=fid)
1912        except AttributeError, e:
[4551]1913            geo_reference = Geo_reference() # Default georef object
[2852]1914
1915        xllcorner = geo_reference.get_xllcorner()
1916        yllcorner = geo_reference.get_yllcorner()
1917        zone = geo_reference.get_zone()
1918    else:
1919        zone = origin[0]
1920        xllcorner = origin[1]
1921        yllcorner = origin[2]
1922
1923
1924
[4663]1925    # FIXME: Refactor using code from Interpolation_function.statistics
1926    # (in interpolate.py)
[4551]1927    # Something like print swwstats(swwname)
[2852]1928    if verbose:
1929        print '------------------------------------------------'
1930        print 'Statistics of SWW file:'
1931        print '  Name: %s' %swwfile
1932        print '  Reference:'
1933        print '    Lower left corner: [%f, %f]'\
1934              %(xllcorner, yllcorner)
[3961]1935        if timestep is not None:
1936            print '    Time: %f' %(times)
1937        else:
1938            print '    Start time: %f' %fid.starttime[0]
[2852]1939        print '  Extent:'
1940        print '    x [m] in [%f, %f], len(x) == %d'\
1941              %(min(x.flat), max(x.flat), len(x.flat))
1942        print '    y [m] in [%f, %f], len(y) == %d'\
1943              %(min(y.flat), max(y.flat), len(y.flat))
[3961]1944        if timestep is not None:
1945            print '    t [s] = %f, len(t) == %d' %(times, 1)
1946        else:
1947            print '    t [s] in [%f, %f], len(t) == %d'\
[3967]1948              %(min(times), max(times), len(times))
[2852]1949        print '  Quantities [SI units]:'
[5189]1950        # Comment out for reduced memory consumption
[3967]1951        for name in ['stage', 'xmomentum', 'ymomentum']:
[2852]1952            q = fid.variables[name][:].flat
[3967]1953            if timestep is not None:
1954                q = q[timestep*len(x):(timestep+1)*len(x)]
1955            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1956        for name in ['elevation']:
1957            q = fid.variables[name][:].flat
1958            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1959           
[2891]1960    # Get quantity and reduce if applicable
[2852]1961    if verbose: print 'Processing quantity %s' %quantity
1962
[2891]1963    # Turn NetCDF objects into Numeric arrays
[5115]1964    try:
1965        q = fid.variables[quantity][:] 
[5189]1966       
1967       
[5115]1968    except:
1969        quantity_dict = {}
1970        for name in fid.variables.keys():
1971            quantity_dict[name] = fid.variables[name][:] 
1972        #Convert quantity expression to quantities found in sww file   
1973        q = apply_expression_to_dictionary(quantity, quantity_dict)
[5189]1974    #print "q.shape",q.shape
[2852]1975    if len(q.shape) == 2:
1976        #q has a time component and needs to be reduced along
1977        #the temporal dimension
1978        if verbose: print 'Reducing quantity %s' %quantity
1979        q_reduced = zeros( number_of_points, Float )
[3967]1980       
1981        if timestep is not None:
1982            for k in range(number_of_points):
1983                q_reduced[k] = q[timestep,k] 
1984        else:
1985            for k in range(number_of_points):
1986                q_reduced[k] = reduction( q[:,k] )
[2852]1987
1988        q = q_reduced
1989
1990    #Post condition: Now q has dimension: number_of_points
1991    assert len(q.shape) == 1
1992    assert q.shape[0] == number_of_points
1993
1994    if verbose:
1995        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1996
1997
1998    #Create grid and update xll/yll corner and x,y
1999
2000    #Relative extent
2001    if easting_min is None:
2002        xmin = min(x)
2003    else:
2004        xmin = easting_min - xllcorner
2005
2006    if easting_max is None:
2007        xmax = max(x)
2008    else:
2009        xmax = easting_max - xllcorner
2010
2011    if northing_min is None:
2012        ymin = min(y)
2013    else:
2014        ymin = northing_min - yllcorner
2015
2016    if northing_max is None:
2017        ymax = max(y)
2018    else:
2019        ymax = northing_max - yllcorner
2020
2021
2022
2023    if verbose: print 'Creating grid'
2024    ncols = int((xmax-xmin)/cellsize)+1
2025    nrows = int((ymax-ymin)/cellsize)+1
[5115]2026   
[2852]2027
2028
2029    #New absolute reference and coordinates
2030    newxllcorner = xmin+xllcorner
2031    newyllcorner = ymin+yllcorner
2032
2033    x = x+xllcorner-newxllcorner
2034    y = y+yllcorner-newyllcorner
[3961]2035   
[2852]2036    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2037    assert len(vertex_points.shape) == 2
2038
2039    grid_points = zeros ( (ncols*nrows, 2), Float )
2040
2041
2042    for i in xrange(nrows):
2043        if format.lower() == 'asc':
2044            yg = i*cellsize
2045        else:
2046            # This will flip the order of the y values for ers
2047            yg = (nrows-i)*cellsize
2048
2049        for j in xrange(ncols):
2050            xg = j*cellsize
2051            k = i*ncols + j
2052
2053            grid_points[k,0] = xg
2054            grid_points[k,1] = yg
2055
2056    #Interpolate
[3514]2057    from anuga.fit_interpolate.interpolate import Interpolate
[2852]2058
[4480]2059    # Remove loners from vertex_points, volumes here
2060    vertex_points, volumes = remove_lone_verts(vertex_points, volumes)
[4497]2061    #export_mesh_file('monkey.tsh',{'vertices':vertex_points, 'triangles':volumes})
[4522]2062    #import sys; sys.exit()
[2852]2063    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2064
2065    #Interpolate using quantity values
2066    if verbose: print 'Interpolating'
2067    grid_values = interp.interpolate(q, grid_points).flat
2068
2069
2070    if verbose:
2071        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2072                                                      max(grid_values))
2073
2074    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2075    P = interp.mesh.get_boundary_polygon()
2076    outside_indices = outside_polygon(grid_points, P, closed=True)
2077
2078    for i in outside_indices:
2079        grid_values[i] = NODATA_value
2080
2081
2082
2083
2084    if format.lower() == 'ers':
2085        # setup ERS header information
2086        grid_values = reshape(grid_values,(nrows, ncols))
2087        header = {}
2088        header['datum'] = '"' + datum + '"'
2089        # FIXME The use of hardwired UTM and zone number needs to be made optional
2090        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2091        header['projection'] = '"UTM-' + str(zone) + '"'
2092        header['coordinatetype'] = 'EN'
2093        if header['coordinatetype'] == 'LL':
2094            header['longitude'] = str(newxllcorner)
2095            header['latitude'] = str(newyllcorner)
2096        elif header['coordinatetype'] == 'EN':
2097            header['eastings'] = str(newxllcorner)
2098            header['northings'] = str(newyllcorner)
2099        header['nullcellvalue'] = str(NODATA_value)
2100        header['xdimension'] = str(cellsize)
2101        header['ydimension'] = str(cellsize)
2102        header['value'] = '"' + quantity + '"'
2103        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2104
2105
2106        #Write
2107        if verbose: print 'Writing %s' %demfile
2108        import ermapper_grids
2109        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2110
2111        fid.close()
2112    else:
2113        #Write to Ascii format
2114
2115        #Write prj file
2116        prjfile = basename_out + '.prj'
2117
2118        if verbose: print 'Writing %s' %prjfile
2119        prjid = open(prjfile, 'w')
2120        prjid.write('Projection    %s\n' %'UTM')
2121        prjid.write('Zone          %d\n' %zone)
2122        prjid.write('Datum         %s\n' %datum)
2123        prjid.write('Zunits        NO\n')
2124        prjid.write('Units         METERS\n')
2125        prjid.write('Spheroid      %s\n' %datum)
2126        prjid.write('Xshift        %d\n' %false_easting)
2127        prjid.write('Yshift        %d\n' %false_northing)
2128        prjid.write('Parameters\n')
2129        prjid.close()
2130
2131
2132
2133        if verbose: print 'Writing %s' %demfile
2134
2135        ascid = open(demfile, 'w')
2136
2137        ascid.write('ncols         %d\n' %ncols)
2138        ascid.write('nrows         %d\n' %nrows)
2139        ascid.write('xllcorner     %d\n' %newxllcorner)
2140        ascid.write('yllcorner     %d\n' %newyllcorner)
2141        ascid.write('cellsize      %f\n' %cellsize)
2142        ascid.write('NODATA_value  %d\n' %NODATA_value)
2143
2144
2145        #Get bounding polygon from mesh
2146        #P = interp.mesh.get_boundary_polygon()
2147        #inside_indices = inside_polygon(grid_points, P)
2148
2149        for i in range(nrows):
2150            if verbose and i%((nrows+10)/10)==0:
2151                print 'Doing row %d of %d' %(i, nrows)
2152
2153            base_index = (nrows-i-1)*ncols
2154
2155            slice = grid_values[base_index:base_index+ncols]
2156            s = array2string(slice, max_line_width=sys.maxint)
2157            ascid.write(s[1:-1] + '\n')
2158
2159
2160            #print
2161            #for j in range(ncols):
2162            #    index = base_index+j#
2163            #    print grid_values[index],
2164            #    ascid.write('%f ' %grid_values[index])
2165            #ascid.write('\n')
2166
2167        #Close
2168        ascid.close()
2169        fid.close()
[4462]2170        return basename_out
2171
[5189]2172
[2852]2173#Backwards compatibility
2174def sww2asc(basename_in, basename_out = None,
2175            quantity = None,
2176            timestep = None,
2177            reduction = None,
2178            cellsize = 10,
2179            verbose = False,
2180            origin = None):
2181    print 'sww2asc will soon be obsoleted - please use sww2dem'
2182    sww2dem(basename_in,
2183            basename_out = basename_out,
2184            quantity = quantity,
2185            timestep = timestep,
2186            reduction = reduction,
2187            cellsize = cellsize,
2188            verbose = verbose,
2189            origin = origin,
2190        datum = 'WGS84',
2191        format = 'asc')
2192
2193def sww2ers(basename_in, basename_out = None,
2194            quantity = None,
2195            timestep = None,
2196            reduction = None,
2197            cellsize = 10,
2198            verbose = False,
2199            origin = None,
2200            datum = 'WGS84'):
2201    print 'sww2ers will soon be obsoleted - please use sww2dem'
2202    sww2dem(basename_in,
2203            basename_out = basename_out,
2204            quantity = quantity,
2205            timestep = timestep,
2206            reduction = reduction,
2207            cellsize = cellsize,
2208            verbose = verbose,
2209            origin = origin,
[2891]2210            datum = datum,
2211            format = 'ers')
[2852]2212################################# END COMPATIBILITY ##############
2213
2214
2215
[2891]2216def sww2pts(basename_in, basename_out=None,
2217            data_points=None,
2218            quantity=None,
2219            timestep=None,
2220            reduction=None,
2221            NODATA_value=-9999,
2222            verbose=False,
2223            origin=None):
2224            #datum = 'WGS84')
2225
2226
2227    """Read SWW file and convert to interpolated values at selected points
2228
2229    The parameter 'quantity' must be the name of an existing quantity or
2230    an expression involving existing quantities. The default is
2231    'elevation'.
2232
2233    if timestep (an index) is given, output quantity at that timestep
2234
2235    if reduction is given use that to reduce quantity over all timesteps.
2236
2237    data_points (Nx2 array) give locations of points where quantity is to be computed.
2238   
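    A minimal sketch (coordinates are made up and must lie in the same
    coordinate system as the sww mesh):

        from anuga.shallow_water.data_manager import sww2pts

        sww2pts('cairns',
                data_points=[[388000.0, 8143000.0],
                             [388500.0, 8143500.0]],
                quantity='elevation',
                reduction=max)
        # Writes cairns_elevation.pts with one interpolated value per point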
2239    """
2240
2241    import sys
2242    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2243    from Numeric import array2string
2244
[3514]2245    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
[3560]2246    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
[2891]2247
[3514]2248    from anuga.geospatial_data.geospatial_data import Geospatial_data
[2891]2249
2250    if quantity is None:
2251        quantity = 'elevation'
2252
2253    if reduction is None:
2254        reduction = max
2255
2256    if basename_out is None:
2257        basename_out = basename_in + '_%s' %quantity
2258
2259    swwfile = basename_in + '.sww'
2260    ptsfile = basename_out + '.pts'
2261
2262    # Read sww file
2263    if verbose: print 'Reading from %s' %swwfile
2264    from Scientific.IO.NetCDF import NetCDFFile
2265    fid = NetCDFFile(swwfile)
2266
2267    # Get extent and reference
2268    x = fid.variables['x'][:]
2269    y = fid.variables['y'][:]
2270    volumes = fid.variables['volumes'][:]
2271
2272    number_of_timesteps = fid.dimensions['number_of_timesteps']
2273    number_of_points = fid.dimensions['number_of_points']
2274    if origin is None:
2275
2276        # Get geo_reference
2277        # sww files don't have to have a geo_ref
2278        try:
2279            geo_reference = Geo_reference(NetCDFObject=fid)
2280        except AttributeError, e:
2281            geo_reference = Geo_reference() #Default georef object
2282
2283        xllcorner = geo_reference.get_xllcorner()
2284        yllcorner = geo_reference.get_yllcorner()
2285        zone = geo_reference.get_zone()
2286    else:
2287        zone = origin[0]
2288        xllcorner = origin[1]
2289        yllcorner = origin[2]
2290
2291
2292
2293    # FIXME: Refactor using code from file_function.statistics
2294    # Something like print swwstats(swwname)
2295    if verbose:
2296        x = fid.variables['x'][:]
2297        y = fid.variables['y'][:]
2298        times = fid.variables['time'][:]
2299        print '------------------------------------------------'
2300        print 'Statistics of SWW file:'
2301        print '  Name: %s' %swwfile
2302        print '  Reference:'
2303        print '    Lower left corner: [%f, %f]'\
2304              %(xllcorner, yllcorner)
2305        print '    Start time: %f' %fid.starttime[0]
2306        print '  Extent:'
2307        print '    x [m] in [%f, %f], len(x) == %d'\
2308              %(min(x.flat), max(x.flat), len(x.flat))
2309        print '    y [m] in [%f, %f], len(y) == %d'\
2310              %(min(y.flat), max(y.flat), len(y.flat))
2311        print '    t [s] in [%f, %f], len(t) == %d'\
2312              %(min(times), max(times), len(times))
2313        print '  Quantities [SI units]:'
2314        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2315            q = fid.variables[name][:].flat
2316            print '    %s in [%f, %f]' %(name, min(q), max(q))
2317
2318
2319
2320    # Get quantity and reduce if applicable
2321    if verbose: print 'Processing quantity %s' %quantity
2322
2323    # Turn NetCDF objects into Numeric arrays
2324    quantity_dict = {}
2325    for name in fid.variables.keys():
2326        quantity_dict[name] = fid.variables[name][:]
2327
2328
2329
2330    # Convert quantity expression to quantities found in sww file   
2331    q = apply_expression_to_dictionary(quantity, quantity_dict)
2332
2333
2334
2335    if len(q.shape) == 2:
2336        # q has a time component and needs to be reduced along
2337        # the temporal dimension
2338        if verbose: print 'Reducing quantity %s' %quantity
2339        q_reduced = zeros( number_of_points, Float )
2340
2341        for k in range(number_of_points):
2342            q_reduced[k] = reduction( q[:,k] )
2343
2344        q = q_reduced
2345
2346    # Post condition: Now q has dimension: number_of_points
2347    assert len(q.shape) == 1
2348    assert q.shape[0] == number_of_points
2349
2350
2351    if verbose:
2352        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2353
2354
2355    # Create grid and update xll/yll corner and x,y
2356    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2357    assert len(vertex_points.shape) == 2
2358
2359    # Interpolate
[3514]2360    from anuga.fit_interpolate.interpolate import Interpolate
[2891]2361    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2362
2363    # Interpolate using quantity values
2364    if verbose: print 'Interpolating'
2365    interpolated_values = interp.interpolate(q, data_points).flat
2366
2367
2368    if verbose:
2369        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2370                                                      max(interpolated_values))
2371
2372    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2373    P = interp.mesh.get_boundary_polygon()
2374    outside_indices = outside_polygon(data_points, P, closed=True)
2375
2376    for i in outside_indices:
2377        interpolated_values[i] = NODATA_value
2378
2379
2380    # Store results   
2381    G = Geospatial_data(data_points=data_points,
2382                        attributes=interpolated_values)
2383
2384    G.export_points_file(ptsfile, absolute = True)
2385
[2931]2386    fid.close()
[2891]2387
2388
[2852]2389def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2390                                  use_cache = False,
2391                                  verbose = False):
2392    """Read Digital Elevation model from the following ASCII format (.asc)
2393
2394    Example:
2395
2396    ncols         3121
2397    nrows         1800
2398    xllcorner     722000
2399    yllcorner     5893000
2400    cellsize      25
2401    NODATA_value  -9999
2402    138.3698 137.4194 136.5062 135.5558 ..........
2403
2404    Convert basename_in + '.asc' to NetCDF format (.dem)
2405    mimicking the ASCII format closely.
2406
2407
2408    An accompanying file with same basename_in but extension .prj must exist
2409    and is used to fix the UTM zone, datum, false northings and eastings.
2410
2411    The prj format is assumed to be as
2412
2413    Projection    UTM
2414    Zone          56
2415    Datum         WGS84
2416    Zunits        NO
2417    Units         METERS
2418    Spheroid      WGS84
2419    Xshift        0.0000000000
2420    Yshift        10000000.0000000000
2421    Parameters
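
    A usage sketch (the 'topography' basename is hypothetical; both
    topography.asc and topography.prj must exist):

        from anuga.shallow_water.data_manager import convert_dem_from_ascii2netcdf

        convert_dem_from_ascii2netcdf('topography',
                                      use_cache=True,
                                      verbose=True)
        # Produces topography.dem (NetCDF) mirroring the ASCII grid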
2422    """
2423
2424
2425
2426    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2427
2428    if use_cache is True:
2429        from caching import cache
2430        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
[4688]2431                       dependencies = [basename_in + '.asc',
2432                                       basename_in + '.prj'],
[2852]2433                       verbose = verbose)
2434
2435    else:
2436        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2437
2438    return result
2439
2440
2441
2442
2443
2444
2445def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2446                                  verbose = False):
2447    """Read Digital Elevation model from the following ASCII format (.asc)
2448
2449    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2450    """
2451
2452    import os
2453    from Scientific.IO.NetCDF import NetCDFFile
2454    from Numeric import Float, array
2455
2456    #root, ext = os.path.splitext(basename_in)
2457    root = basename_in
2458
2459    ###########################################
2460    # Read Meta data
2461    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2462    metadatafile = open(root + '.prj')
2463    metalines = metadatafile.readlines()
2464    metadatafile.close()
2465
2466    L = metalines[0].strip().split()
2467    assert L[0].strip().lower() == 'projection'
2468    projection = L[1].strip()                   #TEXT
2469
2470    L = metalines[1].strip().split()
2471    assert L[0].strip().lower() == 'zone'
2472    zone = int(L[1].strip())
2473
2474    L = metalines[2].strip().split()
2475    assert L[0].strip().lower() == 'datum'
2476    datum = L[1].strip()                        #TEXT
2477
2478    L = metalines[3].strip().split()
2479    assert L[0].strip().lower() == 'zunits'     #IGNORE
2480    zunits = L[1].strip()                       #TEXT
2481
2482    L = metalines[4].strip().split()
2483    assert L[0].strip().lower() == 'units'
2484    units = L[1].strip()                        #TEXT
2485
2486    L = metalines[5].strip().split()
2487    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2488    spheroid = L[1].strip()                     #TEXT
2489
2490    L = metalines[6].strip().split()
2491    assert L[0].strip().lower() == 'xshift'
2492    false_easting = float(L[1].strip())
2493
2494    L = metalines[7].strip().split()
2495    assert L[0].strip().lower() == 'yshift'
2496    false_northing = float(L[1].strip())
2497
2498    #print false_easting, false_northing, zone, datum
2499
2500
2501    ###########################################
2502    #Read DEM data
2503
2504    datafile = open(basename_in + '.asc')
2505
2506    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2507    lines = datafile.readlines()
2508    datafile.close()
2509
2510    if verbose: print 'Got', len(lines), ' lines'
2511
2512    ncols = int(lines[0].split()[1].strip())
2513    nrows = int(lines[1].split()[1].strip())
[4824]2514
2515    # Do cellsize (line 4) before line 2 and 3
2516    cellsize = float(lines[4].split()[1].strip())   
2517
2518    # Checks suggested by Joaquim Luis
2519    # Our internal representation of xllcorner
2520    # and yllcorner is non-standard.
2521    xref = lines[2].split()
2522    if xref[0].strip() == 'xllcorner':
2523        xllcorner = float(xref[1].strip()) # + 0.5*cellsize # Correct offset
2524    elif xref[0].strip() == 'xllcenter':
2525        xllcorner = float(xref[1].strip())
2526    else:
2527        msg = 'Unknown keyword: %s' %xref[0].strip()
2528        raise Exception, msg
2529
2530    yref = lines[3].split()
2531    if yref[0].strip() == 'yllcorner':
2532        yllcorner = float(yref[1].strip()) # + 0.5*cellsize # Correct offset
2533    elif yref[0].strip() == 'yllcenter':
2534        yllcorner = float(yref[1].strip())
2535    else:
2536        msg = 'Unknown keyword: %s' %yref[0].strip()
2537        raise Exception, msg
2538       
2539
[2852]2540    NODATA_value = int(lines[5].split()[1].strip())
2541
2542    assert len(lines) == nrows + 6
2543
2544
2545    ##########################################
2546
2547
2548    if basename_out is None:
2549        netcdfname = root + '.dem'
2550    else:
2551        netcdfname = basename_out + '.dem'
2552
2553    if verbose: print 'Store to NetCDF file %s' %netcdfname
2554    # NetCDF file definition
2555    fid = NetCDFFile(netcdfname, 'w')
2556
2557    #Create new file
2558    fid.institution = 'Geoscience Australia'
2559    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2560                      'of spatial point data'
2561
2562    fid.ncols = ncols
2563    fid.nrows = nrows
2564    fid.xllcorner = xllcorner
2565    fid.yllcorner = yllcorner
2566    fid.cellsize = cellsize
2567    fid.NODATA_value = NODATA_value
2568
2569    fid.zone = zone
2570    fid.false_easting = false_easting
2571    fid.false_northing = false_northing
2572    fid.projection = projection
2573    fid.datum = datum
2574    fid.units = units
2575
2576
2577    # dimension definitions
2578    fid.createDimension('number_of_rows', nrows)
2579    fid.createDimension('number_of_columns', ncols)
2580
2581    # variable definitions
2582    fid.createVariable('elevation', Float, ('number_of_rows',
2583                                            'number_of_columns'))
2584
2585    # Get handles to the variables
2586    elevation = fid.variables['elevation']
2587
2588    #Store data
2589    n = len(lines[6:])
2590    for i, line in enumerate(lines[6:]):
2591        fields = line.split()
2592        if verbose and i%((n+10)/10)==0:
2593            print 'Processing row %d of %d' %(i, nrows)
2594
2595        elevation[i, :] = array([float(x) for x in fields])
2596
2597    fid.close()
2598
2599
2600
2601
2602
2603def ferret2sww(basename_in, basename_out = None,
2604               verbose = False,
2605               minlat = None, maxlat = None,
2606               minlon = None, maxlon = None,
2607               mint = None, maxt = None, mean_stage = 0,
2608               origin = None, zscale = 1,
2609               fail_on_NaN = True,
2610               NaN_filler = 0,
2611               elevation = None,
[3694]2612               inverted_bathymetry = True
[2852]2613               ): #FIXME: Bathymetry should be obtained
2614                                  #from MOST somehow.
2615                                  #Alternatively from elsewhere
2616                                  #or, as a last resort,
2617                                  #specified here.
2618                                  #The value of -100 will work
2619                                  #for the Wollongong tsunami
2620                                  #scenario but is very hacky
2621    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
[3560]2622    sww format native to abstract_2d_finite_volumes.
[2852]2623
2624    Specify only basename_in and read files of the form
2625    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2626    relative height, x-velocity and y-velocity, respectively.
2627
2628    Also convert latitude and longitude to UTM. All coordinates are
2629    assumed to be given in the GDA94 datum.
2630
2631    min's and max's: If omitted, the full extent is used.
2632    To include a value min may equal it, while max must exceed it.
2633    Lat and lon are assumed to be in decimal degrees.
2634
2635    origin is a 3-tuple with geo referenced
2636    UTM coordinates (zone, easting, northing)
2637
2638    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2639    which means that longitude is the fastest
2640    varying dimension (row major order, so to speak)
2641
2642    ferret2sww uses grid points as vertices in a triangular grid
2643    counting vertices from lower left corner upwards, then right
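
    A hedged example (the 'most_output' basename is hypothetical; the files
    most_output_ha.nc, most_output_ua.nc, most_output_va.nc and
    most_output_e.nc must exist):

        from anuga.shallow_water.data_manager import ferret2sww

        ferret2sww('most_output',
                   basename_out='most_boundary',
                   minlat=-21.0, maxlat=-20.0,
                   minlon=116.0, maxlon=117.0,
                   mean_stage=0.0,
                   zscale=1,
                   verbose=True)
        # Writes most_boundary.sww for use as a boundary condition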
2644    """
2645
2646    import os
2647    from Scientific.IO.NetCDF import NetCDFFile
2648    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2649    from Numeric import allclose, around
2650
2651    precision = Float
2652
2653    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2654
2655    if minlat != None:
2656        assert -90 < minlat < 90 , msg
2657    if maxlat != None:
2658        assert -90 < maxlat < 90 , msg
[4050]2659        if minlat != None:
2660            assert maxlat > minlat
[2852]2661    if minlon != None:
2662        assert -180 < minlon < 180 , msg
2663    if maxlon != None:
2664        assert -180 < maxlon < 180 , msg
[4050]2665        if minlon != None:
2666            assert maxlon > minlon
2667       
[2852]2668
2669
2670    #Get NetCDF data
2671    if verbose: print 'Reading files %s_*.nc' %basename_in
[3694]2672    #print "basename_in + '_ha.nc'",basename_in + '_ha.nc'
[2852]2673    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2674    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2675    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2676    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2677
2678    if basename_out is None:
2679        swwname = basename_in + '.sww'
2680    else:
2681        swwname = basename_out + '.sww'
2682
[4418]2683    # Get dimensions of file_h
[2852]2684    for dimension in file_h.dimensions.keys():
2685        if dimension[:3] == 'LON':
2686            dim_h_longitude = dimension
2687        if dimension[:3] == 'LAT':
2688            dim_h_latitude = dimension
2689        if dimension[:4] == 'TIME':
2690            dim_h_time = dimension
2691
2692#    print 'long:', dim_h_longitude
2693#    print 'lats:', dim_h_latitude
2694#    print 'times:', dim_h_time
2695
2696    times = file_h.variables[dim_h_time]
2697    latitudes = file_h.variables[dim_h_latitude]
2698    longitudes = file_h.variables[dim_h_longitude]
[5347]2699
2700    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
2701                                                  longitudes[:],
2702                                                  minlat, maxlat,
2703                                                  minlon, maxlon)
[4418]2704    # get dimensions for file_e
[2852]2705    for dimension in file_e.dimensions.keys():
2706        if dimension[:3] == 'LON':
2707            dim_e_longitude = dimension
2708        if dimension[:3] == 'LAT':
2709            dim_e_latitude = dimension
2710
[4418]2711    # get dimensions for file_u
[2852]2712    for dimension in file_u.dimensions.keys():
2713        if dimension[:3] == 'LON':
2714            dim_u_longitude = dimension
2715        if dimension[:3] == 'LAT':
2716            dim_u_latitude = dimension
2717        if dimension[:4] == 'TIME':
2718            dim_u_time = dimension
2719           
[4418]2720    # get dimensions for file_v
[2852]2721    for dimension in file_v.dimensions.keys():
2722        if dimension[:3] == 'LON':
2723            dim_v_longitude = dimension
2724        if dimension[:3] == 'LAT':
2725            dim_v_latitude = dimension
2726        if dimension[:4] == 'TIME':
2727            dim_v_time = dimension
2728
2729
[4418]2730    # Precision used by most for lat/lon is 4 or 5 decimals
[2852]2731    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2732    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2733
[4418]2734    # Check that files are compatible
[2852]2735    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2736    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2737    assert allclose(latitudes, e_lat)
2738
2739    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2740    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2741    assert allclose(longitudes, e_lon)
2742
[4418]2743    if mint is None:
[2852]2744        jmin = 0
[4418]2745        mint = times[0]       
[2852]2746    else:
2747        jmin = searchsorted(times, mint)
[4024]2748       
[4418]2749    if maxt is None:
2750        jmax = len(times)
2751        maxt = times[-1]
[2852]2752    else:
2753        jmax = searchsorted(times, maxt)
2754
[4037]2755    #print "latitudes[:]",latitudes[:]
2756    #print "longitudes[:]",longitudes [:]
[4024]2757    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
2758                                                  longitudes[:],
[4418]2759                                                  minlat, maxlat,
2760                                                  minlon, maxlon)
[2852]2761
2762
2763    times = times[jmin:jmax]
2764    latitudes = latitudes[kmin:kmax]
2765    longitudes = longitudes[lmin:lmax]
2766
[4037]2767    #print "latitudes[:]",latitudes[:]
2768    #print "longitudes[:]",longitudes [:]
[2852]2769
2770    if verbose: print 'cropping'
2771    zname = 'ELEVATION'
2772
2773    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2774    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2775    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2776    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2777
2778    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2779    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2780    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2781    #        from Numeric import asarray
2782    #        elevations=elevations.tolist()
2783    #        elevations.reverse()
2784    #        elevations=asarray(elevations)
2785    #    else:
2786    #        from Numeric import asarray
2787    #        elevations=elevations.tolist()
2788    #        elevations.reverse()
2789    #        elevations=asarray(elevations)
2790    #        'print hmmm'
2791
2792
2793
2794    #Get missing values
2795    nan_ha = file_h.variables['HA'].missing_value[0]
2796    nan_ua = file_u.variables['UA'].missing_value[0]
2797    nan_va = file_v.variables['VA'].missing_value[0]
2798    if hasattr(file_e.variables[zname],'missing_value'):
2799        nan_e  = file_e.variables[zname].missing_value[0]
2800    else:
2801        nan_e = None
2802
2803    #Cleanup
2804    from Numeric import sometrue
2805
2806    missing = (amplitudes == nan_ha)
2807    if sometrue (missing):
2808        if fail_on_NaN:
2809            msg = 'NetCDFFile %s contains missing values'\
2810                  %(basename_in+'_ha.nc')
2811            raise DataMissingValuesError, msg
2812        else:
2813            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2814
2815    missing = (uspeed == nan_ua)
2816    if sometrue (missing):
2817        if fail_on_NaN:
2818            msg = 'NetCDFFile %s contains missing values'\
2819                  %(basename_in+'_ua.nc')
2820            raise DataMissingValuesError, msg
2821        else:
2822            uspeed = uspeed*(missing==0) + missing*NaN_filler
2823
2824    missing = (vspeed == nan_va)
2825    if sometrue (missing):
2826        if fail_on_NaN:
2827            msg = 'NetCDFFile %s contains missing values'\
2828                  %(basename_in+'_va.nc')
2829            raise DataMissingValuesError, msg
2830        else:
2831            vspeed = vspeed*(missing==0) + missing*NaN_filler
2832
2833
2834    missing = (elevations == nan_e)
2835    if sometrue (missing):
2836        if fail_on_NaN:
2837            msg = 'NetCDFFile %s contains missing values'\
2838                  %(basename_in+'_e.nc')
2839            raise DataMissingValuesError, msg
2840        else:
2841            elevations = elevations*(missing==0) + missing*NaN_filler
2842
2843    #######
2844
2845
2846
2847    number_of_times = times.shape[0]
2848    number_of_latitudes = latitudes.shape[0]
2849    number_of_longitudes = longitudes.shape[0]
2850
2851    assert amplitudes.shape[0] == number_of_times
2852    assert amplitudes.shape[1] == number_of_latitudes
2853    assert amplitudes.shape[2] == number_of_longitudes
2854
2855    if verbose:
2856        print '------------------------------------------------'
2857        print 'Statistics:'
2858        print '  Extent (lat/lon):'
2859        print '    lat in [%f, %f], len(lat) == %d'\
2860              %(min(latitudes.flat), max(latitudes.flat),
2861                len(latitudes.flat))
2862        print '    lon in [%f, %f], len(lon) == %d'\
2863              %(min(longitudes.flat), max(longitudes.flat),
2864                len(longitudes.flat))
2865        print '    t in [%f, %f], len(t) == %d'\
2866              %(min(times.flat), max(times.flat), len(times.flat))
2867
2868        q = amplitudes.flat
2869        name = 'Amplitudes (ha) [cm]'
2870        print '    %s in [%f, %f]' %(name, min(q), max(q))
2871
2872        q = uspeed.flat
2873        name = 'Speeds (ua) [cm/s]'
2874        print '    %s in [%f, %f]' %(name, min(q), max(q))
2875
2876        q = vspeed.flat
2877        name = 'Speeds (va) [cm/s]'
2878        print '    %s in [%f, %f]' %(name, min(q), max(q))
2879
2880        q = elevations.flat
2881        name = 'Elevations (e) [m]'
2882        print '    %s in [%f, %f]' %(name, min(q), max(q))
2883
2884
[4704]2885    # print number_of_latitudes, number_of_longitudes
[2852]2886    number_of_points = number_of_latitudes*number_of_longitudes
2887    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2888
2889
2890    file_h.close()
2891    file_u.close()
2892    file_v.close()
2893    file_e.close()
2894
2895
2896    # NetCDF file definition
2897    outfile = NetCDFFile(swwname, 'w')
2898
[4387]2899    description = 'Converted from Ferret files: %s, %s, %s, %s'\
2900                  %(basename_in + '_ha.nc',
2901                    basename_in + '_ua.nc',
2902                    basename_in + '_va.nc',
2903                    basename_in + '_e.nc')
[4388]2904   
[4704]2905    # Create new file
[4416]2906    starttime = times[0]
[4862]2907   
[4455]2908    sww = Write_sww()
[4704]2909    sww.store_header(outfile, times, number_of_volumes,
2910                     number_of_points, description=description,
[4862]2911                     verbose=verbose,sww_precision=Float)
[2852]2912
[4704]2913    # Store
[3514]2914    from anuga.coordinate_transforms.redfearn import redfearn
[2852]2915    x = zeros(number_of_points, Float)  #Easting
2916    y = zeros(number_of_points, Float)  #Northing
2917
2918
2919    if verbose: print 'Making triangular grid'
[4704]2920
2921    # Check zone boundaries
[2852]2922    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2923
2924    vertices = {}
2925    i = 0
2926    for k, lat in enumerate(latitudes):       #Y direction
2927        for l, lon in enumerate(longitudes):  #X direction
2928
2929            vertices[l,k] = i
2930
2931            zone, easting, northing = redfearn(lat,lon)
2932
2933            msg = 'Zone boundary crossed at longitude = %f' %lon
2934            #assert zone == refzone, msg
2935            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2936            x[i] = easting
2937            y[i] = northing
2938            i += 1
2939
2940    #Construct 2 triangles per 'rectangular' element
2941    volumes = []
2942    for l in range(number_of_longitudes-1):    #X direction
2943        for k in range(number_of_latitudes-1): #Y direction
2944            v1 = vertices[l,k+1]
2945            v2 = vertices[l,k]
2946            v3 = vertices[l+1,k+1]
2947            v4 = vertices[l+1,k]
2948
2949            volumes.append([v1,v2,v3]) #Upper element
2950            volumes.append([v4,v3,v2]) #Lower element
2951
2952    volumes = array(volumes)
2953
[4387]2954    if origin is None:
2955        origin = Geo_reference(refzone,min(x),min(y))
2956    geo_ref = write_NetCDF_georeference(origin, outfile)
2957   
[2852]2958    if elevation is not None:
2959        z = elevation
2960    else:
2961        if inverted_bathymetry:
2962            z = -1*elevations
2963        else:
2964            z = elevations
2965    #FIXME: z should be obtained from MOST and passed in here
2966
[4862]2967    #FIXME use the Write_sww instance(sww) to write this info
[2852]2968    from Numeric import resize
2969    z = resize(z,outfile.variables['z'][:].shape)
[4387]2970    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
2971    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
[3954]2972    outfile.variables['z'][:] = z             #FIXME HACK for bacwards compat.
[2852]2973    outfile.variables['elevation'][:] = z
[3954]2974    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
[2852]2975
2976
2977
2978    #Time stepping
2979    stage = outfile.variables['stage']
2980    xmomentum = outfile.variables['xmomentum']
2981    ymomentum = outfile.variables['ymomentum']
2982
2983    if verbose: print 'Converting quantities'
2984    n = len(times)
2985    for j in range(n):
2986        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2987        i = 0
2988        for k in range(number_of_latitudes):      #Y direction
2989            for l in range(number_of_longitudes): #X direction
2990                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2991                stage[j,i] = w
2992                h = w - z[i]
2993                xmomentum[j,i] = uspeed[j,k,l]/100*h
2994                ymomentum[j,i] = vspeed[j,k,l]/100*h
2995                i += 1
2996
2997    #outfile.close()
2998
2999    #FIXME: Refactor using code from file_function.statistics
3000    #Something like print swwstats(swwname)
3001    if verbose:
3002        x = outfile.variables['x'][:]
3003        y = outfile.variables['y'][:]
3004        print '------------------------------------------------'
3005        print 'Statistics of output file:'
3006        print '  Name: %s' %swwname
3007        print '  Reference:'
3008        print '    Lower left corner: [%f, %f]'\
[4387]3009              %(geo_ref.get_xllcorner(), geo_ref.get_yllcorner())
[4416]3010        print '    Start time: %f' %starttime
[4418]3011        print '    Min time: %f' %mint
3012        print '    Max time: %f' %maxt
[2852]3013        print '  Extent:'
3014        print '    x [m] in [%f, %f], len(x) == %d'\
3015              %(min(x.flat), max(x.flat), len(x.flat))
3016        print '    y [m] in [%f, %f], len(y) == %d'\
3017              %(min(y.flat), max(y.flat), len(y.flat))
3018        print '    t [s] in [%f, %f], len(t) == %d'\
3019              %(min(times), max(times), len(times))
3020        print '  Quantities [SI units]:'
3021        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
3022            q = outfile.variables[name][:].flat
3023            print '    %s in [%f, %f]' %(name, min(q), max(q))
3024
3025
3026
3027    outfile.close()
3028
3029
3030
3031
3032
[4303]3033def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False):
[2852]3034    """Template for converting typical text files with time series to
3035    NetCDF tms file.
3036
3037
3038    The file format is assumed to be either two fields separated by a comma:
3039
3040        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3041
3042    E.g
3043
3044      31/08/04 00:00:00, 1.328223 0 0
3045      31/08/04 00:15:00, 1.292912 0 0
3046
[4303]3047    or time (seconds), value0 value1 value2 ...
3048   
3049      0.0, 1.328223 0 0
3050      0.1, 1.292912 0 0
3051
[2852]3052    will provide a time dependent function f(t) with three attributes
3053
3054    filename is assumed to be the rootname with extensions .txt (input) and .tms (output)
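
    A small sketch (the 'gauge' rootname and quantity names are hypothetical):

        from anuga.shallow_water.data_manager import timefile2netcdf

        # gauge.txt contains lines like '31/08/04 00:00:00, 1.328223 0 0'
        timefile2netcdf('gauge',
                        quantity_names=['stage', 'xmomentum', 'ymomentum'])
        # Writes gauge.tms with one time series per column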
3055    """
3056
3057    import time, calendar
3058    from Numeric import array
[3514]3059    from anuga.config import time_format
3060    from anuga.utilities.numerical_tools import ensure_numeric
[2852]3061
3062
3063
3064    fid = open(filename + '.txt')
3065    line = fid.readline()
3066    fid.close()
3067
3068    fields = line.split(',')
3069    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3070    assert len(fields) == 2, msg
3071
[4303]3072    if not time_as_seconds:
3073        try:
3074            starttime = calendar.timegm(time.strptime(fields[0], time_format))
3075        except ValueError:
3076            msg = 'First field in file %s must be' %filename
3077            msg += ' date-time with format %s.\n' %time_format
3078            msg += 'I got %s instead.' %fields[0]
3079            raise DataTimeError, msg
3080    else:
3081        try:
3082            starttime = float(fields[0])
3083        except ValueError:
3084            msg = "Bad time format"
3085            raise DataTimeError, msg
[2852]3086
3087
3088    #Split values
3089    values = []
3090    for value in fields[1].split():
3091        values.append(float(value))
3092
3093    q = ensure_numeric(values)
3094
3095    msg = 'ERROR: File must contain at least one independent value'
3096    assert len(q.shape) == 1, msg
3097
3098
3099
3100    #Read times proper
3101    from Numeric import zeros, Float, alltrue
[3514]3102    from anuga.config import time_format
[2852]3103    import time, calendar
3104
3105    fid = open(filename + '.txt')
3106    lines = fid.readlines()
3107    fid.close()
3108
3109    N = len(lines)
3110    d = len(q)
3111
3112    T = zeros(N, Float)       #Time
3113    Q = zeros((N, d), Float)  #Values
3114
3115    for i, line in enumerate(lines):
3116        fields = line.split(',')
[4303]3117        if not time_as_seconds:
3118            realtime = calendar.timegm(time.strptime(fields[0], time_format))
3119        else:
3120            realtime = float(fields[0])
[2852]3121        T[i] = realtime - starttime
3122
3123        for j, value in enumerate(fields[1].split()):
3124            Q[i, j] = float(value)
3125
3126    msg = 'File %s must list time as a monotonically ' %filename
3127    msg += 'increasing sequence'
3128    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3129
3130    #Create NetCDF file
3131    from Scientific.IO.NetCDF import NetCDFFile
3132
3133    fid = NetCDFFile(filename + '.tms', 'w')
3134
3135
3136    fid.institution = 'Geoscience Australia'
3137    fid.description = 'Time series'
3138
3139
3140    #Reference point
3141    #Start time in seconds since the epoch (midnight 1/1/1970)
3142    #FIXME: Use Georef
3143    fid.starttime = starttime
3144
3145    # dimension definitions
3146    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3147    #fid.createDimension('number_of_vertices', 3)
3148
3149
3150    fid.createDimension('number_of_timesteps', len(T))
3151
3152    fid.createVariable('time', Float, ('number_of_timesteps',))
3153
3154    fid.variables['time'][:] = T
3155
3156    for i in range(Q.shape[1]):
3157        try:
3158            name = quantity_names[i]
3159        except:
3160            name = 'Attribute%d'%i
3161
3162        fid.createVariable(name, Float, ('number_of_timesteps',))
3163        fid.variables[name][:] = Q[:,i]
3164
3165    fid.close()
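
# Example (hypothetical filename; a minimal sketch of typical usage):
#
#     timefile2netcdf('gauge_data', quantity_names=['stage'])
#
# reads gauge_data.txt containing lines like '31/08/04 00:00:00, 1.328223'
# and writes the time series to gauge_data.tms.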
3166
3167
3168def extent_sww(file_name):
3169    """
3170    Read in an sww file.
3171
3172    Input:
3173    file_name - the sww file
3174
3175    Output:
3176    A list [min(x), max(x), min(y), max(y), min(stage), max(stage)]
3177    giving the spatial extent of the mesh and the range of the stage
3178    values over all timesteps and vertices
3181    """
3182
3183
3184    from Scientific.IO.NetCDF import NetCDFFile
3185
3186    #Check contents
3187    #Get NetCDF
3188    fid = NetCDFFile(file_name, 'r')
3189
3190    # Get the variables
3191    x = fid.variables['x'][:]
3192    y = fid.variables['y'][:]
3193    stage = fid.variables['stage'][:]
3194    #print "stage",stage
3195    #print "stage.shap",stage.shape
3196    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3197    #print "min(stage)",min(stage)
3198
3199    fid.close()
3200
3201    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
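
# Example (hypothetical filename; a minimal sketch):
#
#     xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('run.sww')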
3202
3203
[5276]3204def sww2domain(filename, boundary=None, t=None,
3205               fail_if_NaN=True ,NaN_filler=0,
3206               verbose = False, very_verbose = False):
[2852]3207    """
3208    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3209
3210    Boundary is not recommended if domain.smooth is not selected, as it
3211    uses unique coordinates, but not unique boundaries. This means that
3212    the boundary file will not be compatible with the coordinates, and will
3213    give a different final boundary, or crash.
3214    """
3215    NaN=9.969209968386869e+036
3216    #initialise NaN.
3217
3218    from Scientific.IO.NetCDF import NetCDFFile
3219    from shallow_water import Domain
3220    from Numeric import asarray, transpose, resize
3221
3222    if verbose: print 'Reading from ', filename
3223    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3224    time = fid.variables['time']       #Timesteps
3225    if t is None:
3226        t = time[-1]
3227    time_interp = get_time_interp(time,t)
3228
3229    # Get the variables as Numeric arrays
3230    x = fid.variables['x'][:]             #x-coordinates of vertices
3231    y = fid.variables['y'][:]             #y-coordinates of vertices
3232    elevation = fid.variables['elevation']     #Elevation
3233    stage = fid.variables['stage']     #Water level
3234    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3235    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3236
3237    starttime = fid.starttime[0]
3238    volumes = fid.variables['volumes'][:] #Connectivity
[5276]3239    coordinates = transpose(asarray([x.tolist(),y.tolist()]))
3240    #FIXME (Ole): Something like this might be better: concatenate( (x, y), axis=1 )
3241    # or concatenate( (x[:,NewAxis],x[:,NewAxis]), axis=1 )
[2852]3242
3243    conserved_quantities = []
3244    interpolated_quantities = {}
3245    other_quantities = []
3246
3247    # get geo_reference
3248    #sww files don't have to have a geo_ref
3249    try:
3250        geo_reference = Geo_reference(NetCDFObject=fid)
3251    except: #AttributeError, e:
3252        geo_reference = None
3253
3254    if verbose: print '    getting quantities'
3255    for quantity in fid.variables.keys():
3256        dimensions = fid.variables[quantity].dimensions
3257        if 'number_of_timesteps' in dimensions:
3258            conserved_quantities.append(quantity)
3259            interpolated_quantities[quantity]=\
3260                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3261        else: other_quantities.append(quantity)
3262
3263    other_quantities.remove('x')
3264    other_quantities.remove('y')
3265    other_quantities.remove('z')
3266    other_quantities.remove('volumes')
[4455]3267    try:
3268        other_quantities.remove('stage_range')
3269        other_quantities.remove('xmomentum_range')
3270        other_quantities.remove('ymomentum_range')
3271        other_quantities.remove('elevation_range')
3272    except:
3273        pass
3274       
[2852]3275
3276    conserved_quantities.remove('time')
3277
3278    if verbose: print '    building domain'
3279    #    From domain.Domain:
3280    #    domain = Domain(coordinates, volumes,\
3281    #                    conserved_quantities = conserved_quantities,\
3282    #                    other_quantities = other_quantities,zone=zone,\
3283    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3284
3285    #   From shallow_water.Domain:
3286    coordinates=coordinates.tolist()
3287    volumes=volumes.tolist()
3288    #FIXME:should this be in mesh?(peter row)
3289    if fid.smoothing == 'Yes': unique = False
3290    else: unique = True
3291    if unique:
3292        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3293
3294
3295    try:
3296        domain = Domain(coordinates, volumes, boundary)
3297    except AssertionError, e:
3298        fid.close()
3299        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3300        raise DataDomainError, msg
3301
3302    if not boundary is None:
3303        domain.boundary = boundary
3304
3305    domain.geo_reference = geo_reference
3306
3307    domain.starttime=float(starttime)+float(t)
3308    domain.time=0.0
3309
3310    for quantity in other_quantities:
3311        try:
3312            NaN = fid.variables[quantity].missing_value
3313        except:
3314            pass #quantity has no missing_value number
3315        X = fid.variables[quantity][:]
3316        if very_verbose:
3317            print '       ',quantity
3318            print '        NaN =',NaN
3319            print '        max(X)'
3320            print '       ',max(X)
3321            print '        max(X)==NaN'
3322            print '       ',max(X)==NaN
3323            print ''
3324        if (max(X)==NaN) or (min(X)==NaN):
3325            if fail_if_NaN:
3326                msg = 'quantity "%s" contains no_data entry'%quantity
3327                raise DataMissingValuesError, msg
3328            else:
3329                data = (X<>NaN)
3330                X = (X*data)+(data==0)*NaN_filler
3331        if unique:
3332            X = resize(X,(len(X)/3,3))
3333        domain.set_quantity(quantity,X)
3334    #
3335    for quantity in conserved_quantities:
3336        try:
3337            NaN = fid.variables[quantity].missing_value
3338        except:
3339            pass #quantity has no missing_value number
3340        X = interpolated_quantities[quantity]
3341        if very_verbose:
3342            print '       ',quantity
3343            print '        NaN =',NaN
3344            print '        max(X)'
3345            print '       ',max(X)
3346            print '        max(X)==NaN'
3347            print '       ',max(X)==NaN
3348            print ''
3349        if (max(X)==NaN) or (min(X)==NaN):
3350            if fail_if_NaN:
3351                msg = 'quantity "%s" contains no_data entry'%quantity
3352                raise DataMissingValuesError, msg
3353            else:
3354                data = (X<>NaN)
3355                X = (X*data)+(data==0)*NaN_filler
3356        if unique:
3357            X = resize(X,(X.shape[0]/3,3))
3358        domain.set_quantity(quantity,X)
3359
3360    fid.close()
3361    return domain
3362
[5276]3363
[2852]3364def interpolated_quantity(saved_quantity,time_interp):
3365
3366    #given an index and ratio, interpolate quantity with respect to time.
3367    index,ratio = time_interp
3368    Q = saved_quantity
3369    if ratio > 0:
3370        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3371    else:
3372        q = Q[index]
3373    #Return vector of interpolated values
3374    return q
3375
[5276]3376
[2852]3377def get_time_interp(time,t=None):
3378    #Finds the ratio and index for time interpolation.
[3560]3379    #It is borrowed from previous abstract_2d_finite_volumes code.
[2852]3380    if t is None:
3381        t=time[-1]
3382        index = -1
3383        ratio = 0.
3384    else:
3385        T = time
3386        tau = t
3387        index=0
3388        msg = 'Time interval derived from file %s [%s:%s]'\
3389            %('FIXMEfilename', T[0], T[-1])
3390        msg += ' does not match model time: %s' %tau
3391        if tau < time[0]: raise DataTimeError, msg
3392        if tau > time[-1]: raise DataTimeError, msg
3393        while tau > time[index]: index += 1
3394        while tau < time[index]: index -= 1
3395        if tau == time[index]:
3396            #Protect against case where tau == time[-1] (last time)
3397            # - also works in general when tau == time[i]
3398            ratio = 0
3399        else:
3400            #t is now between index and index+1
3401            ratio = (tau - time[index])/(time[index+1] - time[index])
3402    return (index,ratio)
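
# Example (a minimal sketch): for stored times [0., 10., 20.] and t=15.0,
# get_time_interp returns (1, 0.5); interpolated_quantity then blends the two
# bracketing timesteps of a stored quantity Q (Q is a placeholder name).
#
#     index, ratio = get_time_interp([0., 10., 20.], t=15.0)  # -> (1, 0.5)
#     q = interpolated_quantity(Q, (index, ratio))            # 0.5*Q[1] + 0.5*Q[2]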
3403
3404
3405def weed(coordinates,volumes,boundary = None):
[4455]3406    if type(coordinates)==ArrayType:
[2852]3407        coordinates = coordinates.tolist()
[4455]3408    if type(volumes)==ArrayType:
[2852]3409        volumes = volumes.tolist()
3410
3411    unique = False
3412    point_dict = {}
3413    same_point = {}
3414    for i in range(len(coordinates)):
3415        point = tuple(coordinates[i])
3416        if point_dict.has_key(point):
3417            unique = True
3418            same_point[i]=point
3419            #to change all point i references to point j
3420        else:
3421            point_dict[point]=i
3422            same_point[i]=point
3423
3424    coordinates = []
3425    i = 0
3426    for point in point_dict.keys():
3427        point = tuple(point)
3428        coordinates.append(list(point))
3429        point_dict[point]=i
3430        i+=1
3431
3432
3433    for volume in volumes:
3434        for i in range(len(volume)):
3435            index = volume[i]
3436            if index>-1:
3437                volume[i]=point_dict[same_point[index]]
3438
3439    new_boundary = {}
3440    if not boundary is None:
3441        for segment in boundary.keys():
3442            point0 = point_dict[same_point[segment[0]]]
3443            point1 = point_dict[same_point[segment[1]]]
3444            label = boundary[segment]
3445            #FIXME should the boundary attributes be concatenated
3446            #('exterior, pond') or replaced ('pond')(peter row)
3447
3448            if new_boundary.has_key((point0,point1)):
3449                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3450                                              #+','+label
3451
3452            elif new_boundary.has_key((point1,point0)):
3453                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3454                                              #+','+label
3455            else: new_boundary[(point0,point1)]=label
3456
3457        boundary = new_boundary
3458
3459    return coordinates,volumes,boundary
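
# Example (a minimal sketch): the duplicated vertex is removed and the
# triangle indices are remapped onto the surviving points (the ordering of
# the weeded coordinate list follows dictionary key order).
#
#     coords = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [0.0, 0.0]]
#     tris = [[0, 1, 2], [3, 1, 2]]
#     coords, tris, bnd = weed(coords, tris)
#     # len(coords) == 3 and both triangles now use the same three indices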
3460
3461
3462def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3463                 verbose=False):
3464    """Read Digital Elevation Model from the following NetCDF format (.dem)
3465
3466    Example:
3467
3468    ncols         3121
3469    nrows         1800
3470    xllcorner     722000
3471    yllcorner     5893000
3472    cellsize      25
3473    NODATA_value  -9999
3474    138.3698 137.4194 136.5062 135.5558 ..........
3475
3476    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3477    """
3478
3479    import os
3480    from Scientific.IO.NetCDF import NetCDFFile
3481    from Numeric import Float, zeros, sum, reshape, equal
3482
3483    root = basename_in
3484    inname = root + '.dem'
3485
3486    #Open existing netcdf file to read
3487    infile = NetCDFFile(inname, 'r')
3488    if verbose: print 'Reading DEM from %s' %inname
3489
3490    #Read metadata
3491    ncols = infile.ncols[0]
3492    nrows = infile.nrows[0]
3493    xllcorner = infile.xllcorner[0]
3494    yllcorner = infile.yllcorner[0]
3495    cellsize = infile.cellsize[0]
3496    NODATA_value = infile.NODATA_value[0]
3497    zone = infile.zone[0]
3498    false_easting = infile.false_easting[0]
3499    false_northing = infile.false_northing[0]
3500    projection = infile.projection
3501    datum = infile.datum
3502    units = infile.units
3503
3504    dem_elevation = infile.variables['elevation']
3505
3506    #Get output file name
3507    if basename_out == None:
3508        outname = root + '_' + repr(cellsize_new) + '.dem'
3509    else:
3510        outname = basename_out + '.dem'
3511
3512    if verbose: print 'Write decimated NetCDF file to %s' %outname
3513
3514    #Determine some dimensions for decimated grid
3515    (nrows_stencil, ncols_stencil) = stencil.shape
3516    x_offset = ncols_stencil / 2
3517    y_offset = nrows_stencil / 2
3518    cellsize_ratio = int(cellsize_new / cellsize)
3519    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3520    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3521
3522    #Open netcdf file for output
3523    outfile = NetCDFFile(outname, 'w')
3524
3525    #Create new file
3526    outfile.institution = 'Geoscience Australia'
3527    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3528                           'of spatial point data'
3529    #Georeferencing
3530    outfile.zone = zone
3531    outfile.projection = projection
3532    outfile.datum = datum
3533    outfile.units = units
3534
3535    outfile.cellsize = cellsize_new
3536    outfile.NODATA_value = NODATA_value
3537    outfile.false_easting = false_easting
3538    outfile.false_northing = false_northing
3539
3540    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3541    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3542    outfile.ncols = ncols_new
3543    outfile.nrows = nrows_new
3544
3545    # dimension definition
3546    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3547
3548    # variable definition
3549    outfile.createVariable('elevation', Float, ('number_of_points',))
3550
3551    # Get handle to the variable
3552    elevation = outfile.variables['elevation']
3553
3554    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3555
3556    #Store data
3557    global_index = 0
3558    for i in range(nrows_new):
3559        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3560        lower_index = global_index
3561        telev =  zeros(ncols_new, Float)
3562        local_index = 0
3563        trow = i * cellsize_ratio
3564
3565        for j in range(ncols_new):
3566            tcol = j * cellsize_ratio
3567            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3568
3569            #if dem contains 1 or more NODATA_values set value in
3570            #decimated dem to NODATA_value, else compute decimated
3571            #value using stencil
3572            if sum(sum(equal(tmp, NODATA_value))) > 0:
3573                telev[local_index] = NODATA_value
3574            else:
3575                telev[local_index] = sum(sum(tmp * stencil))
3576
3577            global_index += 1
3578            local_index += 1
3579
3580        upper_index = global_index
3581
3582        elevation[lower_index:upper_index] = telev
3583
3584    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3585
3586    infile.close()
3587    outfile.close()
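
# Example (hypothetical filenames; a minimal sketch): decimate a 25 m DEM to
# 100 m using a simple 2x2 averaging stencil.
#
#     from Numeric import ones, Float
#     stencil = ones((2, 2), Float) / 4.0
#     decimate_dem('topography', stencil, cellsize_new=100)
#
# This reads topography.dem and writes topography_100.dem.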
3588
3589
3590
3591
[4868]3592def tsh2sww(filename, verbose=False): 
[2852]3593    """
3594    Convert a tsh/msh file to an sww file, e.g. to check that the mesh 'looks' good.
3595    """
3596
3597
3598    if verbose == True:print 'Creating domain from', filename
3599    domain = pmesh_to_domain_instance(filename, Domain)
3600    if verbose == True:print "Number of triangles = ", len(domain)
3601
3602    domain.smooth = True
3603    domain.format = 'sww'   #Native netcdf visualisation format
3604    file_path, filename = path.split(filename)
3605    filename, ext = path.splitext(filename)
[3846]3606    domain.set_name(filename)   
[2852]3607    domain.reduction = mean
3608    if verbose == True:print "file_path",file_path
3609    if file_path == "":file_path = "."
3610    domain.set_datadir(file_path)
3611
3612    if verbose == True:
3613        print "Output written to " + domain.get_datadir() + sep + \
[3846]3614              domain.get_name() + "." + domain.format
[2852]3615    sww = get_dataobject(domain)
3616    sww.store_connectivity()
[4868]3617    sww.store_timestep()
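
# Example (hypothetical filename; a minimal sketch):
#
#     tsh2sww('mesh.tsh', verbose=True)
#
# writes mesh.sww (a single timestep) into the same directory as mesh.tsh.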
[2852]3618
3619
3620def asc_csiro2sww(bath_dir,
3621                  elevation_dir,
3622                  ucur_dir,
3623                  vcur_dir,
3624                  sww_file,
3625                  minlat = None, maxlat = None,
3626                  minlon = None, maxlon = None,
3627                  zscale=1,
3628                  mean_stage = 0,
3629                  fail_on_NaN = True,
3630                  elevation_NaN_filler = 0,
3631                  bath_prefix='ba',
3632                  elevation_prefix='el',
3633                  verbose=False):
3634    """
3635    Produce an sww boundary file, from esri ascii data from CSIRO.
3636
3637    Also convert latitude and longitude to UTM. All coordinates are
3638    assumed to be given in the GDA94 datum.
3639
3640    assume:
3641    All files are in esri ascii format
3642
3643    4 types of information
3644    bathymetry
3645    elevation
3646    u velocity
3647    v velocity
3648
3649    Assumptions
3650    The metadata of all the files is the same
3651    Each type is in a separate directory
3652    One bath file with extension .000
3653    The time period is less than 24hrs and uniform.
3654    """
3655    from Scientific.IO.NetCDF import NetCDFFile
3656
[3514]3657    from anuga.coordinate_transforms.redfearn import redfearn
[2852]3658
3659    precision = Float # So if we want to change the precision its done here
3660
3661    # go in to the bath dir and load the only file,
3662    bath_files = os.listdir(bath_dir)
3663
3664    bath_file = bath_files[0]
3665    bath_dir_file =  bath_dir + os.sep + bath_file
3666    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3667
3668    #Use the date.time of the bath file as a basis for
3669    #the start time for other files
3670    base_start = bath_file[-12:]
3671
3672    #go into the elevation dir and load the 000 file
3673    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3674                         + base_start
3675
3676    elevation_files = os.listdir(elevation_dir)
3677    ucur_files = os.listdir(ucur_dir)
3678    vcur_files = os.listdir(vcur_dir)
[4031]3679    elevation_files.sort()
[2852]3680    # the first elevation file should be the
3681    # file with the same base name as the bath data
3682    assert elevation_files[0] == 'el' + base_start
3683
3684    number_of_latitudes = bath_grid.shape[0]
3685    number_of_longitudes = bath_grid.shape[1]
3686    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3687
3688    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3689                  for x in range(number_of_longitudes)]
3690    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3691                 for y in range(number_of_latitudes)]
3692
3693     # reverse order of lat, so the first lat represents the first grid row
3694    latitudes.reverse()
3695
[4027]3696    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
[2852]3697                                                 minlat=minlat, maxlat=maxlat,
3698                                                 minlon=minlon, maxlon=maxlon)
3699
3700
3701    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3702    latitudes = latitudes[kmin:kmax]
3703    longitudes = longitudes[lmin:lmax]
3704    number_of_latitudes = len(latitudes)
3705    number_of_longitudes = len(longitudes)
3706    number_of_times = len(os.listdir(elevation_dir))
3707    number_of_points = number_of_latitudes*number_of_longitudes
3708    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3709
3710    #Work out the times
3711    if len(elevation_files) > 1:
3712        # Assume: The time period is less than 24hrs.
3713        time_period = (int(elevation_files[1][-3:]) - \
3714                      int(elevation_files[0][-3:]))*60*60
3715        times = [x*time_period for x in range(len(elevation_files))]
3716    else:
3717        times = [0.0]
3718
3719
3720    if verbose:
3721        print '------------------------------------------------'
3722        print 'Statistics:'
3723        print '  Extent (lat/lon):'
3724        print '    lat in [%f, %f], len(lat) == %d'\
3725              %(min(latitudes), max(latitudes),
3726                len(latitudes))
3727        print '    lon in [%f, %f], len(lon) == %d'\
3728              %(min(longitudes), max(longitudes),
3729                len(longitudes))
3730        print '    t in [%f, %f], len(t) == %d'\
3731              %(min(times), max(times), len(times))
3732
3733    ######### WRITE THE SWW FILE #############
3734    # NetCDF file definition
3735    outfile = NetCDFFile(sww_file, 'w')
3736
3737    #Create new file
3738    outfile.institution = 'Geoscience Australia'
3739    outfile.description = 'Converted from XXX'
3740
3741
3742    #For sww compatibility
3743    outfile.smoothing = 'Yes'
3744    outfile.order = 1
3745
3746    #Start time in seconds since the epoch (midnight 1/1/1970)
3747    outfile.starttime = starttime = times[0]
3748
3749
3750    # dimension definitions
3751    outfile.createDimension('number_of_volumes', number_of_volumes)
3752
3753    outfile.createDimension('number_of_vertices', 3)
3754    outfile.createDimension('number_of_points', number_of_points)
3755    outfile.createDimension('number_of_timesteps', number_of_times)
3756
3757    # variable definitions
3758    outfile.createVariable('x', precision, ('number_of_points',))
3759    outfile.createVariable('y', precision, ('number_of_points',))
3760    outfile.createVariable('elevation', precision, ('number_of_points',))
3761
3762    #FIXME: Backwards compatibility
3763    outfile.createVariable('z', precision, ('number_of_points',))
3764    #################################
3765
3766    outfile.createVariable('volumes', Int, ('number_of_volumes',
3767                                            'number_of_vertices'))
3768
3769    outfile.createVariable('time', precision,
3770                           ('number_of_timesteps',))
3771
3772    outfile.createVariable('stage', precision,
3773                           ('number_of_timesteps',
3774                            'number_of_points'))
3775
3776    outfile.createVariable('xmomentum', precision,
3777                           ('number_of_timesteps',
3778                            'number_of_points'))
3779
3780    outfile.createVariable('ymomentum', precision,
3781                           ('number_of_timesteps',
3782                            'number_of_points'))
3783
3784    #Store
[3514]3785    from anuga.coordinate_transforms.redfearn import redfearn
[2852]3786    x = zeros(number_of_points, Float)  #Easting
3787    y = zeros(number_of_points, Float)  #Northing
3788
3789    if verbose: print 'Making triangular grid'
3790    #Get zone of 1st point.
3791    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3792
3793    vertices = {}
3794    i = 0
3795    for k, lat in enumerate(latitudes):
3796        for l, lon in enumerate(longitudes):
3797
3798            vertices[l,k] = i
3799
3800            zone, easting, northing = redfearn(lat,lon)
3801
3802            msg = 'Zone boundary crossed at longitude = %f' %lon
3803            #assert zone == refzone, msg
3804            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3805            x[i] = easting
3806            y[i] = northing
3807            i += 1
3808
3809
3810    #Construct 2 triangles per 'rectangular' element
3811    volumes = []
3812    for l in range(number_of_longitudes-1):    #X direction
3813        for k in range(number_of_latitudes-1): #Y direction
3814            v1 = vertices[l,k+1]
3815            v2 = vertices[l,k]
3816            v3 = vertices[l+1,k+1]
3817            v4 = vertices[l+1,k]
3818
3819            #Note, this is different to the ferret2sww code
3820            #since the order of the lats is reversed.
3821            volumes.append([v1,v3,v2]) #Upper element
3822            volumes.append([v4,v2,v3]) #Lower element
3823
3824    volumes = array(volumes)
3825
3826    geo_ref = Geo_reference(refzone,min(x),min(y))
3827    geo_ref.write_NetCDF(outfile)
3828
3829    # This will put the geo ref in the middle
3830    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3831
3832
3833    if verbose:
3834        print '------------------------------------------------'
3835        print 'More Statistics:'
3836        print '  Extent (UTM):'
3837        print '    x in [%f, %f], len(x) == %d'\
3838              %(min(x), max(x),
3839                len(x))
3840        print '    y in [%f, %f], len(y) == %d'\
3841              %(min(y), max(y),
3842                len(y))
3843        print 'geo_ref: ',geo_ref
3844
3845    z = resize(bath_grid,outfile.variables['z'][:].shape)
3846    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3847    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3848    outfile.variables['z'][:] = z
3849    outfile.variables['elevation'][:] = z  #FIXME HACK
3850    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3851
3852    stage = outfile.variables['stage']
3853    xmomentum = outfile.variables['xmomentum']
3854    ymomentum = outfile.variables['ymomentum']
3855
3856    outfile.variables['time'][:] = times   #Store time relative
3857
3858    if verbose: print 'Converting quantities'
3859    n = number_of_times
3860    for j in range(number_of_times):
3861        # load in files
3862        elevation_meta, elevation_grid = \
3863           _read_asc(elevation_dir + os.sep + elevation_files[j])
3864
3865        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3866        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3867
3868        #cut matrix to desired size
3869        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3870        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3871        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
[4271]3872       
[2852]3873        # handle missing values
3874        missing = (elevation_grid == elevation_meta['NODATA_value'])
3875        if sometrue (missing):
3876            if fail_on_NaN:
3877                msg = 'File %s contains missing values'\
3878                      %(elevation_files[j])
3879                raise DataMissingValuesError, msg
3880            else:
3881                elevation_grid = elevation_grid*(missing==0) + \
3882                                 missing*elevation_NaN_filler
3883
3884
3885        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3886        i = 0
3887        for k in range(number_of_latitudes):      #Y direction
3888            for l in range(number_of_longitudes): #X direction
3889                w = zscale*elevation_grid[k,l] + mean_stage
3890                stage[j,i] = w
3891                h = w - z[i]
3892                xmomentum[j,i] = u_momentum_grid[k,l]*h
3893                ymomentum[j,i] = v_momentum_grid[k,l]*h
3894                i += 1
3895    outfile.close()
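
# Example (hypothetical directory names and extent; a minimal sketch):
#
#     asc_csiro2sww('bathymetry', 'elevation', 'ucur', 'vcur',
#                   'outback_boundary.sww',
#                   minlat=-21.0, maxlat=-20.0,
#                   minlon=114.0, maxlon=115.0,
#                   mean_stage=0.0, verbose=True)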
3896
[4037]3897def _get_min_max_indexes(latitudes_ref,longitudes_ref,
[2852]3898                        minlat=None, maxlat=None,
3899                        minlon=None, maxlon=None):
3900    """
3901    return max, min indexes (for slicing) of the lat and long arrays to cover the area
3902    specified with min/max lat/long
3903
3904    Think of the latitudes and longitudes describing a 2d surface.
3905    The area returned is, if possible, just big enough to cover the
3906    input max/min area. (This will not be possible if the max/min area
3907    has a section outside of the latitudes/longitudes area.)
3908
[4037]3909    assert longitudes are sorted,
[2852]3910    long - from low to high (west to east, eg 148 - 151)
[4037]3911    assert latitudes are sorted, ascending or descending
[2852]3912    """
[4037]3913    latitudes = latitudes_ref[:]
3914    longitudes = longitudes_ref[:]
[2852]3915
[4037]3916    latitudes = ensure_numeric(latitudes)
3917    longitudes = ensure_numeric(longitudes)
[5347]3918
[4037]3919    assert allclose(sort(longitudes), longitudes)
[5347]3920
[5352]3921    #print latitudes[0],longitudes[0]
3922    #print len(latitudes),len(longitudes)
3923    #print latitudes[len(latitudes)-1],longitudes[len(longitudes)-1]
[4024]3924   
[4037]3925    lat_ascending = True
3926    if not allclose(sort(latitudes), latitudes):
3927        lat_ascending = False
3928        # reverse order of lat, so it's in ascending order         
3929        latitudes = latitudes[::-1]
3930        assert allclose(sort(latitudes), latitudes)
3931    #print "latitudes  in funct", latitudes
3932   
[2852]3933    largest_lat_index = len(latitudes)-1
3934    #Cut out a smaller extent.
3935    if minlat == None:
3936        lat_min_index = 0
3937    else:
3938        lat_min_index = searchsorted(latitudes, minlat)-1
3939        if lat_min_index <0:
3940            lat_min_index = 0
3941
3942
3943    if maxlat == None:
3944        lat_max_index = largest_lat_index #len(latitudes)
3945    else:
3946        lat_max_index = searchsorted(latitudes, maxlat)
3947        if lat_max_index > largest_lat_index:
3948            lat_max_index = largest_lat_index
3949
3950    if minlon == None:
3951        lon_min_index = 0
3952    else:
3953        lon_min_index = searchsorted(longitudes, minlon)-1
3954        if lon_min_index <0:
3955            lon_min_index = 0
3956
3957    if maxlon == None:
3958        lon_max_index = len(longitudes)
3959    else:
3960        lon_max_index = searchsorted(longitudes, maxlon)
3961
[4037]3962    # Reversing the indexes, if the lat array is descending
3963    if lat_ascending is False:
3964        lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3965                                       largest_lat_index - lat_min_index
[2852]3966    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3967    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3968
3969    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
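
# Example (a minimal sketch): with ascending latitudes [10., 20., 30., 40.]
# and longitudes [1., 2., 3., 4.],
#
#     kmin, kmax, lmin, lmax = _get_min_max_indexes([10., 20., 30., 40.],
#                                                   [1., 2., 3., 4.],
#                                                   minlat=25, maxlat=35)
#
# returns (1, 4, 0, 5): latitudes[1:4] == [20., 30., 40.] is the smallest
# slice covering [25, 35], and the full longitude range is retained.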
3970
3971
3972def _read_asc(filename, verbose=False):
3973    """Read esri file from the following ASCII format (.asc)
3974
3975    Example:
3976
3977    ncols         3121
3978    nrows         1800
3979    xllcorner     722000
3980    yllcorner     5893000
3981    cellsize      25
3982    NODATA_value  -9999
3983    138.3698 137.4194 136.5062 135.5558 ..........
3984
3985    """
3986
3987    datafile = open(filename)
3988
3989    if verbose: print 'Reading DEM from %s' %(filename)
3990    lines = datafile.readlines()
3991    datafile.close()
3992
3993    if verbose: print 'Got', len(lines), ' lines'
3994
3995    ncols = int(lines.pop(0).split()[1].strip())
3996    nrows = int(lines.pop(0).split()[1].strip())
3997    xllcorner = float(lines.pop(0).split()[1].strip())
3998    yllcorner = float(lines.pop(0).split()[1].strip())
3999    cellsize = float(lines.pop(0).split()[1].strip())
4000    NODATA_value = float(lines.pop(0).split()[1].strip())
4001
4002    assert len(lines) == nrows
4003
4004    #Store data
4005    grid = []
4006
4007    n = len(lines)
4008    for i, line in enumerate(lines):
4009        cells = line.split()
4010        assert len(cells) == ncols
4011        grid.append(array([float(x) for x in cells]))
4012    grid = array(grid)
4013
4014    return {'xllcorner':xllcorner,
4015            'yllcorner':yllcorner,
4016            'cellsize':cellsize,
4017            'NODATA_value':NODATA_value}, grid
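
# Example (hypothetical filename; a minimal sketch):
#
#     metadata, grid = _read_asc('elevation.asc')
#     # metadata['cellsize'] -> 25.0 (say), grid.shape -> (nrows, ncols)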
4018
[2884]4019
[2852]4020
[3720]4021    ####  URS 2 SWW  ###
4022
4023lon_name = 'LON'
4024lat_name = 'LAT'
4025time_name = 'TIME'
4026precision = Float # So if we want to change the precision its done here       
4027class Write_nc:
4028    """
4029    Write an nc file.
4030   
4031    Note, this should be checked to meet cdc netcdf conventions for gridded
4032    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4033   
4034    """
4035    def __init__(self,
4036                 quantity_name,
4037                 file_name,
4038                 time_step_count,
4039                 time_step,
4040                 lon,
4041                 lat):
4042        """
4043        time_step_count is the number of time steps.
4044        time_step is the time step size
4045       
4046        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
4047        """
4048        self.quantity_name = quantity_name
[4348]4049        quantity_units = {'HA':'CENTIMETERS',
[4302]4050                              'UA':'CENTIMETERS/SECOND',
[4348]4051                              'VA':'CENTIMETERS/SECOND'}       
[3720]4052       
[4348]4053        multiplier_dic = {'HA':100.0, # To convert from m to cm
4054                              'UA':100.0,  #  m/s to cm/sec
4055                              'VA':-100.0}  # MUX files have positive x in the
4056        # Southern direction.  This corrects for it, when writing nc files.
4057       
4058        self.quantity_multiplier =  multiplier_dic[self.quantity_name]
4059       
[3720]4060        #self.file_name = file_name
4061        self.time_step_count = time_step_count
4062        self.time_step = time_step
4063
4064        # NetCDF file definition
4065        self.outfile = NetCDFFile(file_name, 'w')
4066        outfile = self.outfile       
4067
4068        #Create new file
4069        nc_lon_lat_header(outfile, lon, lat)
4070   
4071        # TIME
4072        outfile.createDimension(time_name, None)
4073        outfile.createVariable(time_name, precision, (time_name,))
4074
4075        #QUANTITY
4076        outfile.createVariable(self.quantity_name, precision,
4077                               (time_name, lat_name, lon_name))
4078        outfile.variables[self.quantity_name].missing_value=-1.e+034
4079        outfile.variables[self.quantity_name].units= \
4080                                 quantity_units[self.quantity_name]
4081        outfile.variables[lon_name][:]= ensure_numeric(lon)
4082        outfile.variables[lat_name][:]= ensure_numeric(lat)
4083
4084        #Assume no one will be wanting to read this, while we are writing
4085        #outfile.close()
4086       
4087    def store_timestep(self, quantity_slice):
4088        """
[4348]4089        Write a time slice of quantity info
[3720]4090        quantity_slice is the data to be stored at this time step
4091        """
4092       
4093        outfile = self.outfile
4094       
4095        # Get the variables
4096        time = outfile.variables[time_name]
4097        quantity = outfile.variables[self.quantity_name]
4098           
4099        i = len(time)
4100
4101        #Store time
4102        time[i] = i*self.time_step #self.domain.time
[4348]4103        quantity[i,:] = quantity_slice* self.quantity_multiplier
[3720]4104       
4105    def close(self):
4106        self.outfile.close()
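
# Example (hypothetical names; a minimal sketch of how Write_nc is used by
# _binary_c2nc below):
#
#     nc_file = Write_nc('HA', 'o_ha.nc', time_step_count, time_step, lon, lat)
#     for i in range(time_step_count):
#         nc_file.store_timestep(slice_2d)  # (lat, lon) array, scaled by the
#                                           # m->cm multiplier set in __init__
#     nc_file.close()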
4107
4108def urs2sww(basename_in='o', basename_out=None, verbose=False,
4109            remove_nc_files=True,
4110            minlat=None, maxlat=None,
4111            minlon= None, maxlon=None,
4112            mint=None, maxt=None,
4113            mean_stage=0,
[3930]4114            origin = None,
[3720]4115            zscale=1,
4116            fail_on_NaN=True,
4117            NaN_filler=0,
[4380]4118            elevation=None):
[3720]4119    """
4120    Convert URS C binary format for wave propagation to
4121    sww format native to abstract_2d_finite_volumes.
4122
4123    Specify only basename_in and read files of the form
[4378]4124    basefilename_waveheight-z-mux, basefilename_velocity-e-mux and
4125    basefilename_velocity-n-mux containing relative height,
4126    x-velocity and y-velocity, respectively.
[3720]4127
4128    Also convert latitude and longitude to UTM. All coordinates are
4129    assumed to be given in the GDA94 datum. The latitude and longitude
4130    information is for  a grid.
4131
4132    min's and max's: If omitted - full extent is used.
4133    To include a value min may equal it, while max must exceed it.
[3930]4134    Lat and lon are assumed to be in decimal degrees.
[4014]4135    NOTE: minlon is the most east boundary.
[3964]4136   
4137    origin is a 3-tuple with geo referenced
4138    UTM coordinates (zone, easting, northing)
4139    It will be the origin of the sww file. This shouldn't be used,
4140    since all of anuga should be able to handle an arbitrary origin.
[3720]4141
[3964]4142
[3720]4143    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
4144    which means that latitude is the fastest
4145    varying dimension (row major order, so to speak)
4146
4147    In URS C binary the latitudes and longitudes are in ascending order.
4148    """
4149    if basename_out == None:
4150        basename_out = basename_in
4151    files_out = urs2nc(basename_in, basename_out)
4152    ferret2sww(basename_out,
4153               minlat=minlat,
4154               maxlat=maxlat,
4155               minlon=minlon,
[4014]4156               maxlon=maxlon,
[3720]4157               mint=mint,
4158               maxt=maxt,
4159               mean_stage=mean_stage,
[3930]4160               origin=origin,
[3720]4161               zscale=zscale,
4162               fail_on_NaN=fail_on_NaN,
4163               NaN_filler=NaN_filler,
4164               inverted_bathymetry=True,
4165               verbose=verbose)
4166    #print "files_out",files_out
4167    if remove_nc_files:
4168        for file_out in files_out:
4169            os.remove(file_out)
4170   
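# Example (hypothetical basename and extent; a minimal sketch):
#
#     urs2sww(basename_in='o', basename_out='urs_boundary',
#             minlat=-21.0, maxlat=-20.0, minlon=114.0, maxlon=115.0,
#             mean_stage=0.0, verbose=True)
#
# This expects o_waveheight-z-mux, o_velocity-e-mux and o_velocity-n-mux,
# writes intermediate nc files and converts them via ferret2sww to
# urs_boundary.sww.
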
4171def urs2nc(basename_in = 'o', basename_out = 'urs'):
[3964]4172    """
4173    Convert the 3 urs files to 4 nc files.
4174
4175    The names of the urs files must be:
[4378]4176    [basename_in]_waveheight-z-mux
4177    [basename_in]_velocity-e-mux
4178    [basename_in]_velocity-n-mux
[3964]4179   
4180    """
[4378]4181   
4182    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4183                basename_in + EAST_VELOCITY_LABEL,
4184                basename_in + NORTH_VELOCITY_LABEL]
[3720]4185    files_out = [basename_out+'_ha.nc',
4186                 basename_out+'_ua.nc',
4187                 basename_out+'_va.nc']
4188    quantities = ['HA','UA','VA']
4189
[5104]4190    #if os.access(files_in[0]+'.mux', os.F_OK) == 0 :
4191    for i, file_name in enumerate(files_in):
4192        if os.access(file_name, os.F_OK) == 0:
4193            if os.access(file_name+'.mux', os.F_OK) == 0 :
4194                msg = 'File %s does not exist or is not accessible' %file_name
4195                raise IOError, msg
4196            else:
4197               files_in[i] += '.mux'
4198               print "file_name", file_name
[3720]4199    hashed_elevation = None
4200    for file_in, file_out, quantity in map(None, files_in,
4201                                           files_out,
4202                                           quantities):
[3820]4203        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
[3720]4204                                         file_out,
4205                                         quantity)
[3975]4206        #print "lonlatdep", lonlatdep
[3720]4207        if hashed_elevation == None:
4208            elevation_file = basename_out+'_e.nc'
[3964]4209            write_elevation_nc(elevation_file,
[3720]4210                                lon,
4211                                lat,
[3820]4212                                depth)
[3720]4213            hashed_elevation = myhash(lonlatdep)
4214        else:
4215            msg = "The elevation information in the mux files is inconsistent"
4216            assert hashed_elevation == myhash(lonlatdep), msg
4217    files_out.append(elevation_file)
4218    return files_out
4219   
4220def _binary_c2nc(file_in, file_out, quantity):
4221    """
[3964]4222    Reads in a quantity urs file and writes a quantity nc file.
4223    additionally, returns the depth and lat, long info,
4224    so it can be written to a file.
[3720]4225    """
4226    columns = 3 # long, lat , depth
[3964]4227    mux_file = open(file_in, 'rb')
[3720]4228   
4229    # Number of points/stations
[3964]4230    (points_num,)= unpack('i',mux_file.read(4))
[3720]4231
4232    # nt, int - Number of time steps
[3964]4233    (time_step_count,)= unpack('i',mux_file.read(4))
[3720]4234
4235    #dt, float - time step, seconds
[3964]4236    (time_step,) = unpack('f', mux_file.read(4))
[3720]4237   
4238    msg = "Bad data in the mux file."
4239    if points_num < 0:
[3964]4240        mux_file.close()
[3720]4241        raise ANUGAError, msg
4242    if time_step_count < 0:
[3964]4243        mux_file.close()
[3720]4244        raise ANUGAError, msg
4245    if time_step < 0:
[3964]4246        mux_file.close()
[3720]4247        raise ANUGAError, msg
4248   
4249    lonlatdep = p_array.array('f')
[3964]4250    lonlatdep.read(mux_file, columns * points_num)
[3720]4251    lonlatdep = array(lonlatdep, typecode=Float)   
[3826]4252    lonlatdep = reshape(lonlatdep, (points_num, columns))
[3720]4253   
[3820]4254    lon, lat, depth = lon_lat2grid(lonlatdep)
[3830]4255    lon_sorted = list(lon)
[3750]4256    lon_sorted.sort()
[3830]4257
[3964]4258    if not lon == lon_sorted:
[3750]4259        msg = "Longitudes in mux file are not in ascending order"
4260        raise IOError, msg
[3830]4261    lat_sorted = list(lat)
4262    lat_sorted.sort()
4263
[3964]4264    if not lat == lat_sorted:
[3750]4265        msg = "Latitudes in mux file are not in ascending order"
4266        raise IOError, msg
[3720]4267    nc_file = Write_nc(quantity,
4268                       file_out,
[3826]4269                       time_step_count,
4270                       time_step,
4271                       lon,
4272                       lat)
[3720]4273
4274    for i in range(time_step_count):
[3824]4275        #Read in a time slice  from mux file 
[3720]4276        hz_p_array = p_array.array('f')
[3964]4277        hz_p_array.read(mux_file, points_num)
[3720]4278        hz_p = array(hz_p_array, typecode=Float)
[3820]4279        hz_p = reshape(hz_p, (len(lon), len(lat)))
4280        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
[3824]4281
4282        #write time slice to nc file
[3720]4283        nc_file.store_timestep(hz_p)
[3964]4284    mux_file.close()
[3720]4285    nc_file.close()
4286
[3820]4287    return lonlatdep, lon, lat, depth
[3720]4288   
4289
[3964]4290def write_elevation_nc(file_out, lon, lat, depth_vector):
4291    """
4292    Write an nc elevation file.
4293    """
4294   
[3720]4295    # NetCDF file definition
4296    outfile = NetCDFFile(file_out, 'w')
4297
4298    #Create new file
4299    nc_lon_lat_header(outfile, lon, lat)
4300   
4301    # ELEVATION
4302    zname = 'ELEVATION'
4303    outfile.createVariable(zname, precision, (lat_name, lon_name))
4304    outfile.variables[zname].units='CENTIMETERS'
4305    outfile.variables[zname].missing_value=-1.e+034
4306
4307    outfile.variables[lon_name][:]= ensure_numeric(lon)
4308    outfile.variables[lat_name][:]= ensure_numeric(lat)
4309
4310    depth = reshape(depth_vector, ( len(lat), len(lon)))
4311    outfile.variables[zname][:]= depth
4312   
4313    outfile.close()
4314   
4315def nc_lon_lat_header(outfile, lon, lat):
[3964]4316    """
4317    outfile is the netcdf file handle.
4318    lon - a list/array of the longitudes
4319    lat - a list/array of the latitudes
4320    """
[3720]4321   
4322    outfile.institution = 'Geoscience Australia'
4323    outfile.description = 'Converted from URS binary C'
4324   
4325    # Longitude
4326    outfile.createDimension(lon_name, len(lon))
4327    outfile.createVariable(lon_name, precision, (lon_name,))
4328    outfile.variables[lon_name].point_spacing='uneven'
4329    outfile.variables[lon_name].units='degrees_east'
4330    outfile.variables[lon_name].assignValue(lon)
4331
4332
4333    # Latitude
4334    outfile.createDimension(lat_name, len(lat))
4335    outfile.createVariable(lat_name, precision, (lat_name,))
4336    outfile.variables[lat_name].point_spacing='uneven'
4337    outfile.variables[lat_name].units='degrees_north'
4338    outfile.variables[lat_name].assignValue(lat)
4339
4340
4341   
4342def lon_lat2grid(long_lat_dep):
4343    """
4344    Given a list of points that are assumed to be on a grid,
4345    return the long's and lat's of the grid.
4346    long_lat_dep is an array where each row is a position.
4347    The first column is longitudes.
4348    The second column is latitudes.
[3820]4349
4350    The latitude is the fastest varying dimension - in mux files
[3720]4351    """
4352    LONG = 0
4353    LAT = 1
[3820]4354    QUANTITY = 2
[3830]4355
4356    long_lat_dep = ensure_numeric(long_lat_dep, Float)
[3820]4357   
[3826]4358    num_points = long_lat_dep.shape[0]
4359    this_rows_long = long_lat_dep[0,LONG]
[3964]4360
[3826]4361    # Count the length of unique latitudes
4362    i = 0
4363    while long_lat_dep[i,LONG] == this_rows_long and i < num_points:
4364        i += 1
[3964]4365    # determine the lats and longs from the grid
[3826]4366    lat = long_lat_dep[:i, LAT]       
[3964]4367    long = long_lat_dep[::i, LONG]
4368   
[3826]4369    lenlong = len(long)
4370    lenlat = len(lat)
[3964]4371    #print 'len lat', lat, len(lat)
4372    #print 'len long', long, len(long)
[3826]4373         
4374    msg = 'Input data is not gridded'     
4375    assert num_points % lenlat == 0, msg
4376    assert num_points % lenlong == 0, msg
4377         
4378    # Test that data is gridded       
4379    for i in range(lenlong):
[3720]4380        msg = 'Data is not gridded.  It must be for this operation'
[3826]4381        first = i*lenlat
4382        last = first + lenlat
4383               
4384        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4385        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
[3720]4386   
[3826]4387   
4388#    print 'range long', min(long), max(long)
4389#    print 'range lat', min(lat), max(lat)
4390#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4391#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4392   
4393   
4394   
4395    msg = 'Out of range latitudes/longitudes'
[3720]4396    for l in lat:assert -90 < l < 90 , msg
4397    for l in long:assert -180 < l < 180 , msg
4398
[3826]4399    # Changing quantity from lat being the fastest varying dimension to
[3820]4400    # long being the fastest varying dimension
4401    # FIXME - make this faster/do this a better way
4402    # use numeric transpose, after reshaping the quantity vector
[3826]4403#    quantity = zeros(len(long_lat_dep), Float)
4404    quantity = zeros(num_points, Float)
4405   
4406#    print 'num',num_points
[3820]4407    for lat_i, _ in enumerate(lat):
4408        for long_i, _ in enumerate(long):
4409            q_index = lat_i*lenlong+long_i
4410            lld_index = long_i*lenlat+lat_i
[3826]4411#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4412            temp = long_lat_dep[lld_index, QUANTITY]
4413            quantity[q_index] = temp
[3820]4414           
4415    return long, lat, quantity
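
# Example (a minimal sketch): four points on a 2 x 2 grid, with latitude
# varying fastest as in mux files.
#
#     long_lat_dep = [[148., 10., 1.],
#                     [148., 11., 2.],
#                     [149., 10., 3.],
#                     [149., 11., 4.]]
#     long, lat, quantity = lon_lat2grid(long_lat_dep)
#     # long == [148., 149.], lat == [10., 11.]
#     # quantity == [1., 3., 2., 4.]  (longitude now varies fastest)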
4416
[5250]4417####  END URS 2 SWW  ###
[4223]4418
[5250]4419#### URS UNGRIDDED 2 SWW ###
[4223]4420
[5250]4421### PRODUCING THE POINTS NEEDED FILE ###
4422
[5248]4423# Ones used for FESA 2007 results
4424#LL_LAT = -50.0
4425#LL_LONG = 80.0
4426#GRID_SPACING = 1.0/60.0
4427#LAT_AMOUNT = 4800
4428#LONG_AMOUNT = 3600
4429
[4318]4430def URS_points_needed_to_file(file_name, boundary_polygon, zone,
[5249]4431                              ll_lat, ll_long,
4432                              grid_spacing, 
4433                              lat_amount, long_amount,
[5253]4434                              isSouthernHemisphere=True,
[4318]4435                              export_csv=False, use_cache=False,
[4284]4436                              verbose=False):
[4238]4437    """
[5253]4438    Given the info to replicate the URS grid and a polygon output
4439    a file that specifies the cloud of boundary points for URS.
[5369]4440
4441    This creates a .urs file.  This is in the format used by URS;
4442    1st line is the number of points,
4443    each line after represents a point, in lats and longs.
[5253]4444   
4445    Note: The polygon cannot cross zones or hemispheres.
[5369]4446
4447    A work-around for different zones or hemispheres is to run this twice,
4448    once for each zone, and then combine the output.
[5253]4449   
[4238]4450    file_name - name of the urs file produced for David.
4451    boundary_polygon - a list of points that describes a polygon.
4452                      The last point is assumed to join the first point.
4453                      This is in UTM (lat long would be better though)
4454
[5248]4455     This is info about the URS model that needs to be inputted.
[5253]4456     
[4238]4457    ll_lat - lower left latitude, in decimal degrees
4458    ll_long - lower left longitude, in decimal degrees
4459    grid_spacing - in decimal degrees
[5254]4460    lat_amount - number of latitudes
4461    long_amount- number of longs
[4238]4462
4463
4464    Don't add the file extension.  It will be added.
4465    """
[4318]4466    geo = URS_points_needed(boundary_polygon, zone, ll_lat, ll_long,
4467                            grid_spacing, 
[5253]4468                            lat_amount, long_amount, isSouthernHemisphere,
4469                            use_cache, verbose)
[4254]4470    if not file_name[-4:] == ".urs":
4471        file_name += ".urs"
[5253]4472    geo.export_points_file(file_name, isSouthHemisphere=isSouthernHemisphere)
[4284]4473    if export_csv:
4474        if file_name[-4:] == ".urs":
4475            file_name = file_name[:-4] + ".csv"
4476        geo.export_points_file(file_name)
[5254]4477    return geo
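
# Example (hypothetical polygon and grid parameters; a minimal sketch):
#
#     polygon = [[398000., 7710000.], [398000., 7720000.],
#                [408000., 7720000.], [408000., 7710000.]]
#     URS_points_needed_to_file('boundary_points', polygon, zone=50,
#                               ll_lat=-21.5, ll_long=114.5,
#                               grid_spacing=1.0/60.0,
#                               lat_amount=120, long_amount=120)
#
# This writes boundary_points.urs listing the lat/long grid points needed
# to cover the polygon's boundary segments.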
[4284]4478
[5250]4479def URS_points_needed(boundary_polygon, zone, ll_lat,
4480                      ll_long, grid_spacing, 
[5253]4481                      lat_amount, long_amount, isSouthHemisphere=True,
[4318]4482                      use_cache=False, verbose=False):
4483    args = (boundary_polygon,
[5253]4484            zone, ll_lat,
4485            ll_long, grid_spacing, 
4486            lat_amount, long_amount, isSouthHemisphere)
4487    kwargs = {} 
[4284]4488    if use_cache is True:
4489        try:
4490            from anuga.caching import cache
4491        except:
4492            msg = 'Caching was requested, but caching module'+\
4493                  'could not be imported'
4494            raise msg
4495
4496
4497        geo = cache(_URS_points_needed,
4498                  args, kwargs,
4499                  verbose=verbose,
4500                  compression=False)
4501    else:
[5253]4502        geo = apply(_URS_points_needed, args, kwargs)
[5250]4503
4504    return geo
4505
4506def _URS_points_needed(boundary_polygon,
[4318]4507                      zone, ll_lat,
[4284]4508                      ll_long, grid_spacing, 
[5253]4509                      lat_amount, long_amount,
4510                       isSouthHemisphere):
[4223]4511    """
4512    boundary_polygon - a list of points that describes a polygon.
4513                      The last point is assumed to join the first point.
[5250]4514                      This is in UTM (lat long would be better though)
[4223]4515
4516    ll_lat - lower left latitude, in decimal degrees
4517    ll_long - lower left longitude, in decimal degrees
4518    grid_spacing - in decimal degrees
4519
4520    """
[4254]4521   
[4223]4522    from sets import ImmutableSet
4523   
4524    msg = "grid_spacing can not be zero"
[5250]4525    assert not grid_spacing == 0, msg
[4223]4526    a = boundary_polygon
4527    # List of segments.  Each segment is two points.
4528    segs = [i and [a[i-1], a[i]] or [a[len(a)-1], a[0]] for i in range(len(a))]
4529    # convert the segs to Lat's and longs.
[4318]4530   
4531    # Don't assume the zone of the segments is the same as the lower left
4532    # corner of the lat long data!!  They can easily be in different zones
4533   
[4223]4534    lat_long_set = ImmutableSet()
4535    for seg in segs:
4536        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing, 
[5253]4537                      lat_amount, long_amount, zone, isSouthHemisphere)
[4223]4538        lat_long_set |= ImmutableSet(points_lat_long)
[5253]4539    if lat_long_set == ImmutableSet([]):
4540        msg = """URS region specified and polygon does not overlap."""
4541        raise ValueError, msg
4542
4543    # Warning there is no info in geospatial saying the hemisphere of
4544    # these points.  There should be.
[4223]4545    geo = Geospatial_data(data_points=list(lat_long_set),
4546                              points_are_lats_longs=True)
4547    return geo
4548   
4549def points_needed(seg, ll_lat, ll_long, grid_spacing, 
[5253]4550                  lat_amount, long_amount, zone,
4551                  isSouthHemisphere):
[4223]4552    """
[5254]4553    seg is two points, in UTM
[4223]4554    return a list of the points, in lats and longs that are needed to
4555    interpolate any point on the segment.
4556    """
[4280]4557    from math import sqrt
[4223]4558    #print "zone",zone
4559    geo_reference = Geo_reference(zone=zone)
[5254]4560    #print "seg", seg
[4223]4561    geo = Geospatial_data(seg,geo_reference=geo_reference)
[5253]4562    seg_lat_long = geo.get_data_points(as_lat_long=True,
4563                                       isSouthHemisphere=isSouthHemisphere)
[5254]4564    #print "seg_lat_long", seg_lat_long
[4223]4565    # 1.415 = 2^0.5, rounded up....
[4280]4566    sqrt_2_rounded_up = 1.415
4567    buffer = sqrt_2_rounded_up * grid_spacing
[4223]4568   
4569    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
4570    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
[4238]4571    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
4572    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer
[4223]4573
[5254]4574    #print "min_long", min_long
4575    #print "ll_long", ll_long
4576    #print "grid_spacing", grid_spacing
[4223]4577    first_row = (min_long - ll_long)/grid_spacing
4578    # To round up
4579    first_row_long = int(round(first_row + 0.5))
4580    #print "first_row", first_row_long
4581
4582    last_row = (max_long - ll_long)/grid_spacing # round down
4583    last_row_long = int(round(last_row))
4584    #print "last_row",last_row _long
4585   
4586    first_row = (min_lat - ll_lat)/grid_spacing
4587    # To round up
4588    first_row_lat = int(round(first_row + 0.5))
4589    #print "first_row", first_row_lat
4590
4591    last_row = (max_lat - ll_lat)/grid_spacing # round down
4592    last_row_lat = int(round(last_row))
4593    #print "last_row",last_row_lat
4594
[4283]4595    # to work out the max distance -
4596    # 111120 - horizontal distance between 1 deg latitude.
4597    #max_distance = sqrt_2_rounded_up * 111120 * grid_spacing
4598    max_distance = 157147.4112 * grid_spacing
4599    #print "max_distance", max_distance #2619.12 m for 1 minute
[4223]4600    points_lat_long = []
4601    # Create a list of the lat long points to include.
4602    for index_lat in range(first_row_lat, last_row_lat + 1):
4603        for index_long in range(first_row_long, last_row_long + 1):
[5254]4604            #print "index_lat", index_lat
4605            #print "index_long", index_long
[4223]4606            lat = ll_lat + index_lat*grid_spacing
4607            long = ll_long + index_long*grid_spacing
[4280]4608
4609            #filter here to keep good points
4610            if keep_point(lat, long, seg, max_distance):
4611                points_lat_long.append((lat, long)) #must be hashable
[4223]4612    #print "points_lat_long", points_lat_long
[4268]4613
[4280]4614    # Points too far from the segment have already been filtered out by keep_point
4615    return points_lat_long
4616
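# Editorial example (illustrative sketch only; the numbers below are made up):
# given one UTM segment and the lower-left corner of a one-arc-minute URS
# grid, points_needed returns the (lat, long) grid nodes required to
# interpolate anywhere along that segment, e.g.
#
#     seg = [[394000.0, 7520000.0], [395000.0, 7521000.0]]  # two UTM points
#     points = points_needed(seg, ll_lat=-34.5, ll_long=150.0,
#                            grid_spacing=1.0/60.0,
#                            lat_amount=60, long_amount=60, zone=56,
#                            isSouthHemisphere=True)
#     # points is a list of hashable (lat, long) tuples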
4617def keep_point(lat, long, seg, max_distance):
4618    """
4619    seg is two UTM points; return True if (lat, long) is within max_distance of the line through seg.
4620    """
4621    from math import sqrt
4622    _ , x0, y0 = redfearn(lat, long)
4623    x1 = seg[0][0]
4624    y1 = seg[0][1]
4625    x2 = seg[1][0]
4626    y2 = seg[1][1]
4627    x2_1 = x2-x1
4628    y2_1 = y2-y1
[5254]4629    try:
4630        d = abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1))/sqrt( \
4631            (x2_1)*(x2_1)+(y2_1)*(y2_1))
4632    except ZeroDivisionError:
4633        #print "seg", seg
4634        #print "x0", x0
4635        #print "y0", y0
4636        if sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1)) == 0 and \
4637           abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1)) == 0:
4638            return True
4639        else:
4640            return False
4641   
[4280]4642    if d <= max_distance:
4643        return True
4644    else:
4645        return False
4646   
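# Editorial note: d in keep_point is the perpendicular distance from the point
# (x0, y0) to the infinite line through (x1, y1) and (x2, y2):
#
#     d = |(x2-x1)*(y1-y0) - (x1-x0)*(y2-y1)| / sqrt((x2-x1)**2 + (y2-y1)**2)
#
# For example, the line through (0, 0) and (2, 0) and the point (1, 1) give
# d = |2*(0-1) - (0-1)*0| / sqrt(4) = 1, as expected.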
[4268]4647    #### CONVERTING UNGRIDDED URS DATA TO AN SWW FILE ####
[4378]4648   
[4400]4649WAVEHEIGHT_MUX_LABEL = '_waveheight-z-mux'
[4378]4650EAST_VELOCITY_LABEL =  '_velocity-e-mux'
[4400]4651NORTH_VELOCITY_LABEL =  '_velocity-n-mux' 
[4268]4652def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
[4382]4653                      mint=None, maxt=None,
4654                      mean_stage=0,
4655                      origin=None,
4656                      hole_points_UTM=None,
4657                      zscale=1):
[4298]4658    """   
4659    Convert URS C binary format for wave propagation to
4660    sww format native to abstract_2d_finite_volumes.
4661
[4378]4662
[4298]4663    Specify only basename_in and read files of the form
[4378]4664    basefilename_waveheight-z-mux, basefilename_velocity-e-mux and
4665    basefilename_velocity-n-mux containing relative height,
4666    x-velocity and y-velocity, respectively.
[4298]4667
4668    Also convert latitude and longitude to UTM. All coordinates are
4669    assumed to be given in the GDA94 datum. The latitude and longitude
4670    information is assumed to be ungridded.
4671
4672    mint and maxt: If omitted the full time extent is used.
4673    To include a value, mint or maxt may equal it.
4674    Lat and lon are assumed to be in decimal degrees.
4675   
4676    origin is a 3-tuple with geo referenced
4677    UTM coordinates (zone, easting, northing)
4678    It will be the origin of the sww file. This shouldn't be used,
4679    since all of anuga should be able to handle an arbitrary origin.
[4455]4680    The mux point info is NOT relative to this origin.
[4298]4681
4682
4683    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
4684    which means that latitude is the fastest
4685    varying dimension (row major order, so to speak)
4686
4687    In URS C binary the latitudes and longitudes are in ascending order.
[4363]4688
4689    Note, interpolations of the resulting sww file will be different
4690    from results of urs2sww.  This is due to the interpolation
4691    function used, and the different grid structure between urs2sww
4692    and this function.
4693   
4694    Interpolating data that has an underlying gridded source can
4695    easily end up with different values, depending on the underlying
4696    mesh.
4697
4698    consider these 4 points
4699    50  -50
4700
4701    0     0
4702
4703    The grid can be
4704     -
4705    |\|    A
4706     -
4707     or;
4708      -
4709     |/|   B
4710      -
4711      If a point is just below the center of the midpoint, it will have a
4712      +ve value in grid A and a -ve value in grid B.
[4292]4713    """ 
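    # Example usage (illustrative sketch only; file and directory names are
    # hypothetical):
    #
    #     urs_ungridded2sww(basename_in='o', basename_out='boundary',
    #                       mint=0.0, maxt=3600.0, mean_stage=0.0, zscale=1,
    #                       verbose=True)
    #
    # This reads o_waveheight-z-mux, o_velocity-e-mux and o_velocity-n-mux
    # and writes boundary.sww.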
[4899]4714    from anuga.mesh_engine.mesh_engine import NoTrianglesError
4715    from anuga.pmesh.mesh import Mesh
[4280]4716
[4378]4717    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4718                basename_in + EAST_VELOCITY_LABEL,
4719                basename_in + NORTH_VELOCITY_LABEL]
[4271]4720    quantities = ['HA','UA','VA']
[4268]4721
[4271]4722    # Instantiate Urs_points for the three mux files.
4723    mux = {}
4724    for quantity, file in map(None, quantities, files_in):
4725        mux[quantity] = Urs_points(file)
4726       
[4298]4727    # Could check that the depth is the same. (hashing)
[4268]4728
[4271]4729    # handle to a mux file to do depth stuff
4730    a_mux = mux[quantities[0]]
4731   
4732    # Convert to utm
4733    lat = a_mux.lonlatdep[:,1]
4734    long = a_mux.lonlatdep[:,0]
4735    points_utm, zone = convert_from_latlon_to_utm( \
4736        latitudes=lat, longitudes=long)
4737    #print "points_utm", points_utm
4738    #print "zone", zone
4739
[4280]4740    elevation = a_mux.lonlatdep[:,2] * -1 # depth (+ve down) converted to elevation (+ve up)
[4271]4741   
4742    # grid ( create a mesh from the selected points)
[4280]4743    # This mesh has a problem.  Triangles are stretched over ungridded areas.
4744    #  If these areas could be described as holes in pmesh, that would be great
[4381]4745
4746    # I can't just get the user to select a point in the middle.
4747    # A boundary is needed around these points.
4748    # But if the zone of points is obvious enough auto-segment should do
4749    # a good boundary.
[4271]4750    mesh = Mesh()
4751    mesh.add_vertices(points_utm)
[4455]4752    mesh.auto_segment(smooth_indents=True, expand_pinch=True)
4753    # To try and avoid alpha shape 'hugging' too much
4754    mesh.auto_segment( mesh.shape.get_alpha()*1.1 )
[4382]4755    if hole_points_UTM is not None:
4756        point = ensure_absolute(hole_points_UTM)
4757        mesh.add_hole(point[0], point[1])
4758    try:
4759        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4760    except NoTrianglesError:
4761        # This is a bit of a hack, going in and changing the
4762        # data structure.
4763        mesh.holes = []
4764        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
[4271]4765    mesh_dic = mesh.Mesh2MeshList()
4766
[4522]4767    #mesh.export_mesh_file(basename_in + '_168.tsh')
4768    #import sys; sys.exit()
[4295]4769    # These are the times of the mux file
4770    mux_times = []
[4271]4771    for i in range(a_mux.time_step_count):
[4295]4772        mux_times.append(a_mux.time_step * i) 
4773    mux_times_start_i, mux_times_fin_i = mux2sww_time(mux_times, mint, maxt)
4774    times = mux_times[mux_times_start_i:mux_times_fin_i]
4775   
4776    if mux_times_start_i == mux_times_fin_i:
4777        # Close the mux files
4778        for quantity, file in map(None, quantities, files_in):
4779            mux[quantity].close()
4780        msg="Due to mint and maxt there's no time info in the boundary SWW."
[4608]4781        raise Exception, msg
[4271]4782       
[4295]4783    # If this raise is removed there are currently no downstream errors
4784           
[4271]4785    points_utm=ensure_numeric(points_utm)
4786    assert ensure_numeric(mesh_dic['generatedpointlist']) == \
4787           ensure_numeric(points_utm)
4788   
4789    volumes = mesh_dic['generatedtrianglelist']
4790   
4791    # write sww intro and grid stuff.   
[4268]4792    if basename_out is None:
4793        swwname = basename_in + '.sww'
4794    else:
4795        swwname = basename_out + '.sww'
4796
[4348]4797    if verbose: print 'Output to ', swwname
[4268]4798    outfile = NetCDFFile(swwname, 'w')
[4295]4799    # For a different way of doing this, check out tsh2sww
4800    # work out sww_times and the index range this covers
[4455]4801    sww = Write_sww()
[4704]4802    sww.store_header(outfile, times, len(volumes), len(points_utm),
[4862]4803                     verbose=verbose,sww_precision=Float)
[4295]4804    outfile.mean_stage = mean_stage
4805    outfile.zscale = zscale
[4455]4806
[4704]4807    sww.store_triangulation(outfile, points_utm, volumes,
[4455]4808                            elevation, zone,  new_origin=origin,
4809                            verbose=verbose)
[4292]4810   
4811    if verbose: print 'Converting quantities'
[4280]4812    j = 0
4813    # Read in a time slice from each mux file and write it to the sww file
[4455]4814    for ha, ua, va in map(None, mux['HA'], mux['UA'], mux['VA']):
[4295]4815        if j >= mux_times_start_i and j < mux_times_fin_i:
[4455]4816            stage = zscale*ha + mean_stage
4817            h = stage - elevation
4818            xmomentum = ua*h
4819            ymomentum = -1*va*h # -1 since in mux files south is positive.
[4704]4820            sww.store_quantities(outfile, 
4821                                 slice_index=j - mux_times_start_i,
4822                                 verbose=verbose,
4823                                 stage=stage,
4824                                 xmomentum=xmomentum,
[4862]4825                                 ymomentum=ymomentum,
4826                                 sww_precision=Float)
[4280]4827        j += 1
[4455]4828    if verbose: sww.verbose_quantities(outfile)
[4280]4829    outfile.close()
[4271]4830    #
4831    # Do some conversions while writing the sww file
[4455]4832
[5347]4833    ##################################
4834    # READ MUX2 FILES line of points #
4835    ##################################
4836
4837WAVEHEIGHT_MUX2_LABEL = '_waveheight-z-mux2'
4838EAST_VELOCITY_MUX2_LABEL =  '_velocity-e-mux2'
4839NORTH_VELOCITY_MUX2_LABEL =  '_velocity-n-mux2'   
4840
4841def read_mux2_py(filenames,weights=None):
4842
4843    from Numeric import ones,Float,compress,zeros,arange
4844    from urs_ext import read_mux2
4845
4846    if weights is None:
4847        weights=ones(len(filenames),Float)/len(filenames) #default 1/numSrc
4848
4849    file_params=-1*ones(3,Float)#[nsta,dt,nt]
[5358]4850    write=0 #if true write txt files to current directory as well
[5347]4851    data=read_mux2(1,filenames,weights,file_params,write)
4852
4853    msg='File parameter values were not read in correctly from c file'
4854    assert len(compress(file_params>0,file_params))!=0,msg
4855    msg='The number of stations specified in the c array and in the file are inconsistent'
4856    assert file_params[0]==data.shape[0],msg
[4455]4857   
[5347]4858    nsta=int(file_params[0])
4859    msg='Must have at least one station'
4860    assert nsta>0,msg
4861    dt=file_params[1]
4862    msg='Must have a positive timestep'
4863    assert dt>0,msg
4864    nt=int(file_params[2])
4865    msg='Must have at least one gauge value'
4866    assert nt>0,msg
4867   
4868    OFFSET=5 #number of site parameters p passed back with data
4869    #p=[geolat,geolon,depth,start_tstep,finish_tstep]
4870
4871    times=dt*arange(1,(data.shape[1]-OFFSET)+1)
4872    latitudes=zeros(data.shape[0],Float)
4873    longitudes=zeros(data.shape[0],Float)
4874    elevation=zeros(data.shape[0],Float)
[5358]4875    quantity=zeros((data.shape[0],data.shape[1]-OFFSET),Float)
[5347]4876    for i in range(0,data.shape[0]):
4877        latitudes[i]=data[i][data.shape[1]-OFFSET]
4878        longitudes[i]=data[i][data.shape[1]-OFFSET+1]
4879        elevation[i]=-data[i][data.shape[1]-OFFSET+2]
[5358]4880        quantity[i]=data[i][:-OFFSET]
[5347]4881
[5358]4882    return times, latitudes, longitudes, elevation, quantity
[5347]4883
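# Editorial note on read_mux2_py (not from the original source): for N
# stations and nt time steps it returns
#
#     times      - (nt,) array of times dt*1 .. dt*nt
#     latitudes  - (N,) station latitudes
#     longitudes - (N,) station longitudes
#     elevation  - (N,) station elevations (negative of the mux2 depths)
#     quantity   - (N, nt) array of the quantity values per station
#
# For example (the file name is hypothetical):
#
#     times, lats, longs, elev, ha = read_mux2_py(['o_waveheight-z-mux2'])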
[4295]4884def mux2sww_time(mux_times, mint, maxt):
4885    """Return the start and finish indices into mux_times covering [mint, maxt].
4886    """
[4271]4887
[4295]4888    if mint == None:
4889        mux_times_start_i = 0
4890    else:
4891        mux_times_start_i = searchsorted(mux_times, mint)
4892       
4893    if maxt == None:
4894        mux_times_fin_i = len(mux_times)
4895    else:
4896        maxt += 0.5 # so if you specify a time where there is
4897                    # data that time will be included
4898        mux_times_fin_i = searchsorted(mux_times, maxt)
4899
4900    return mux_times_start_i, mux_times_fin_i
4901
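# Example (illustrative only): with mux_times = [0.0, 1.0, 2.0, 3.0],
# mux2sww_time(mux_times, mint=0.5, maxt=2.0) returns (1, 3), so the included
# slice mux_times[1:3] is [1.0, 2.0].  The 0.5 added to maxt means a time
# exactly equal to maxt is included.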
[4455]4902
[5347]4903def urs2sts(basename_in, basename_out = None, verbose = False, origin = None,
4904            mean_stage=0.0,zscale=1.0,
4905            minlat = None, maxlat = None,
4906            minlon = None, maxlon = None):
4907    """Convert URS mux2 format for wave propagation to sts format
4908
4909    Specify only basename_in and read files of the form
4910    out_waveheight-z-mux2
4911
4912    Also convert latitude and longitude to UTM. All coordinates are
4913    assumed to be given in the GDA94 datum
4914
4915    origin is a 3-tuple with geo referenced
4916    UTM coordinates (zone, easting, northing)
4917    """
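    # Example usage (illustrative sketch only; file names are hypothetical):
    #
    #     urs2sts('o', basename_out='gauges', mean_stage=0.0, zscale=1.0,
    #             minlat=-35.0, maxlat=-34.0, minlon=150.0, maxlon=151.0)
    #
    # This reads o_waveheight-z-mux2, o_velocity-e-mux2 and o_velocity-n-mux2
    # and writes gauges.sts.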
4918    import os
4919    from Scientific.IO.NetCDF import NetCDFFile
4920    from Numeric import Float, Int, Int32, searchsorted, zeros, array
4921    from Numeric import allclose, around
4922
4923    precision = Float
4924
4925    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
4926
4927    if minlat != None:
4928        assert -90 < minlat < 90 , msg
4929    if maxlat != None:
4930        assert -90 < maxlat < 90 , msg
4931        if minlat != None:
4932            assert maxlat > minlat
4933    if minlon != None:
4934        assert -180 < minlon < 180 , msg
4935    if maxlon != None:
4936        assert -180 < maxlon < 180 , msg
4937        if minlon != None:
4938            assert maxlon > minlon
4939
4940    if basename_out is None:
4941        stsname = basename_in + '.sts'
4942    else:
4943        stsname = basename_out + '.sts'
4944
4945    files_in = [basename_in + WAVEHEIGHT_MUX2_LABEL,
4946                basename_in + EAST_VELOCITY_MUX2_LABEL,
4947                basename_in + NORTH_VELOCITY_MUX2_LABEL]
4948    quantities = ['HA','UA','VA']
4949
[5358]4950    for file_in in files_in:
4951        if (os.access(file_in, os.F_OK) == 0):
4952            msg = 'File %s does not exist or is not accessible' %file_in
4953            raise IOError, msg
4954
[5347]4955    #need to do this for velocity-e-mux2 and velocity-n-mux2 files as well
4956    #for now set x_momentum and y_momentum quantities to zero
4957    if (verbose): print 'reading mux2 file'
4958    mux={}
4959    for quantity, file in map(None, quantities, files_in):
4960        times_urs, latitudes_urs, longitudes_urs, elevation, mux[quantity] = read_mux2_py([file])
4961        if quantity!=quantities[0]:
4962            msg='%s, %s and %s have inconsistent gauge data'%(files_in[0],files_in[1],files_in[2])
4963            assert allclose(times_urs,times_urs_old),msg
4964            assert allclose(latitudes_urs,latitudes_urs_old),msg
4965            assert allclose(longitudes_urs,longitudes_urs_old),msg
4966            assert allclose(elevation,elevation_old),msg
4967        times_urs_old=times_urs
4968        latitudes_urs_old=latitudes_urs
4969        longitudes_urs_old=longitudes_urs
4970        elevation_old=elevation
4971       
4972    if (minlat is not None) and (minlon is not None) and (maxlat is not None) and (maxlon is not None):
[5358]4973        if verbose: print 'Clipping urs data'
[5347]4974        latitudes = compress((latitudes_urs>=minlat)&(latitudes_urs<=maxlat)&(longitudes_urs>=minlon)&(longitudes_urs<=maxlon),latitudes_urs)
4975        longitudes = compress((latitudes_urs>=minlat)&(latitudes_urs<=maxlat)&(longitudes_urs>=minlon)&(longitudes_urs<=maxlon),longitudes_urs)
4976        times = times_urs # times is per time step, not per station, so it is not clipped
4977    else:
4978        latitudes=latitudes_urs
4979        longitudes=longitudes_urs
4980        times=times_urs
4981
[5358]4982    msg='File is empty and/or clipped region not in file region'
4983    assert len(latitudes) > 0, msg
4984
[5347]4985    number_of_points = latitudes.shape[0]
4986    number_of_times = times.shape[0]
4987    number_of_latitudes = latitudes.shape[0]
4988    number_of_longitudes = longitudes.shape[0]
4989
4990    # NetCDF file definition
4991    outfile = NetCDFFile(stsname, 'w')
4992
4993    description = 'Converted from URS mux2 files: %s'\
4994                  %(basename_in)
4995   
4996    # Create new file
4997    starttime = times[0]
4998    sts = Write_sts()
4999    sts.store_header(outfile, times,number_of_points, description=description,
5000                     verbose=verbose,sts_precision=Float)
5001   
5002    # Store
5003    from anuga.coordinate_transforms.redfearn import redfearn
5004    x = zeros(number_of_points, Float)  #Easting
5005    y = zeros(number_of_points, Float)  #Northing
5006
5007    # Check zone boundaries
5008    refzone, _, _ = redfearn(latitudes[0],longitudes[0]) 
5009
5010    for i in range(number_of_points):
5011        zone, easting, northing = redfearn(latitudes[i],longitudes[i])
5012        x[i] = easting
5013        y[i] = northing
[5358]5014        msg='all sts gauges need to be in the same zone'
5015        assert zone==refzone,msg
[5347]5016
5017    if origin is None:
5018        origin = Geo_reference(refzone,min(x),min(y))
5019    geo_ref = write_NetCDF_georeference(origin, outfile)
5020
5021    z = elevation
5022   
5023    #print geo_ref.get_xllcorner()
5024    #print geo_ref.get_yllcorner()
5025
5026    z = resize(z,outfile.variables['z'][:].shape)
5027    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
5028    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
5029    outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
5030    outfile.variables['elevation'][:] = z
5031
5032    stage = outfile.variables['stage']
5033    xmomentum = outfile.variables['xmomentum']
5034    ymomentum =outfile.variables['ymomentum']
5035
5036    if verbose: print 'Converting quantities'
5037    for j in range(len(times)):
5038        for i in range(number_of_points):
5039            w = zscale*mux['HA'][i,j] + mean_stage
5040            h=w-elevation[i]
5041            stage[j,i] = w
5042            #delete following two lines once velocity files are read in.
5043            xmomentum[j,i] = mux['UA'][i,j]*h
5044            ymomentum[j,i] = mux['VA'][i,j]*h
5045
5046    outfile.close()
5047
[4455]5048class Write_sww:
[4704]5049    from anuga.shallow_water.shallow_water_domain import Domain
5050
5051    # FIXME (Ole): Hardwiring the conserved quantities like
5052    # this could be a problem. I would prefer taking them from
5053    # the instantiation of Domain.
[4780]5054    #
5055    # (DSG) There is not always a Domain instance when Write_sww is used.
5056    # Check to see if this is the same level of hardwiring as is in
5057    # shallow water domain.
5058   
[4455]5059    sww_quantities = Domain.conserved_quantities
[4704]5060
5061
[4455]5062    RANGE = '_range'
[4704]5063    EXTREMA = ':extrema'
[4699]5064
[4455]5065    def __init__(self):
5066        pass
[4387]5067   
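    # Typical usage (illustrative sketch only, mirroring urs_ungridded2sww
    # above; outfile is an open NetCDFFile and the arrays are assumptions):
    #
    #     sww = Write_sww()
    #     sww.store_header(outfile, times, len(volumes), len(points_utm))
    #     sww.store_triangulation(outfile, points_utm, volumes, elevation,
    #                             zone, new_origin=origin)
    #     sww.store_quantities(outfile, slice_index=0, stage=stage,
    #                          xmomentum=xmomentum, ymomentum=ymomentum)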
[4704]5068    def store_header(self,
5069                     outfile,
5070                     times,
5071                     number_of_volumes,
5072                     number_of_points,
5073                     description='Converted from XXX',
5074                     smoothing=True,
[4862]5075                     order=1,
5076                     sww_precision=Float32,
5077                     verbose=False):
[4455]5078        """
5079        outfile - handle to the open NetCDF file that will be written to
[4558]5080        times - A list of the time slice times OR a start time
5081        Note, if a list is given the info will be made relative.
[4455]5082        number_of_volumes - the number of triangles
5083        """
[4268]5084   
[4455]5085        outfile.institution = 'Geoscience Australia'
5086        outfile.description = description
[4268]5087
[4699]5088        # For sww compatibility
[4455]5089        if smoothing is True:
5090            # Smoothing to be deprecated
5091            outfile.smoothing = 'Yes'
5092            outfile.vertices_are_stored_uniquely = 'False'
5093        else:
5094            # Smoothing to be deprecated
5095            outfile.smoothing = 'No'
5096            outfile.vertices_are_stored_uniquely = 'True'
5097        outfile.order = order
[4268]5098
[4455]5099        try:
5100            revision_number = get_revision_number()
5101        except:
5102            revision_number = None
[4699]5103        # Allow None to be stored as a string               
5104        outfile.revision_number = str(revision_number) 
[4268]5105
[4699]5106
5107       
5108        # times - A list or array of the time slice times OR a start time
5109        # times = ensure_numeric(times)
5110        # Start time in seconds since the epoch (midnight 1/1/1970)
5111
5112        # This is used to separate a single start time (a number) from
[4455]5113        # a list/array of time slice times.
5114        if type(times) is list or type(times) is ArrayType: 
5115            number_of_times = len(times)
[4558]5116            times = ensure_numeric(times) 
5117            if number_of_times == 0:
5118                starttime = 0
5119            else:
5120                starttime = times[0]
5121                times = times - starttime  #Store relative times
[4455]5122        else:
5123            number_of_times = 0
[4558]5124            starttime = times
5125            #times = ensure_numeric([])
5126        outfile.starttime = starttime
[4455]5127        # dimension definitions
5128        outfile.createDimension('number_of_volumes', number_of_volumes)
5129        outfile.createDimension('number_of_vertices', 3)
5130        outfile.createDimension('numbers_in_range', 2)
5131   
5132        if smoothing is True:
5133            outfile.createDimension('number_of_points', number_of_points)
5134       
5135            # FIXME(Ole): This will cause sww files for parallel domains to
5136            # have ghost nodes stored (but not used by triangles).
5137            # To clean this up, we have to change get_vertex_values and
5138            # friends in quantity.py (but I can't be bothered right now)
5139        else:
5140            outfile.createDimension('number_of_points', 3*number_of_volumes)
5141        outfile.createDimension('number_of_timesteps', number_of_times)
[4268]5142
[4455]5143        # variable definitions
[4862]5144        outfile.createVariable('x', sww_precision, ('number_of_points',))
5145        outfile.createVariable('y', sww_precision, ('number_of_points',))
5146        outfile.createVariable('elevation', sww_precision, ('number_of_points',))
[4455]5147        q = 'elevation'
[4862]5148        outfile.createVariable(q+Write_sww.RANGE, sww_precision,
[4455]5149                               ('numbers_in_range',))
[4268]5150
[4704]5151
[4699]5152        # Initialise ranges with small and large sentinels.
5153        # If this was in pure Python we could have used None sensibly
[4862]5154        outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
[4699]5155        outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
5156
[4704]5157        # FIXME: Backwards compatibility
[4862]5158        outfile.createVariable('z', sww_precision, ('number_of_points',))
[4455]5159        #################################
[4268]5160
[4455]5161        outfile.createVariable('volumes', Int, ('number_of_volumes',
5162                                                'number_of_vertices'))
[4862]5163        # Doing sww_precision instead of Float gives cast errors.
5164        outfile.createVariable('time', Float,
[4455]5165                               ('number_of_timesteps',))
5166       
5167        for q in Write_sww.sww_quantities:
[4862]5168            outfile.createVariable(q, sww_precision,
[4455]5169                                   ('number_of_timesteps',
5170                                    'number_of_points')) 
[4862]5171            outfile.createVariable(q+Write_sww.RANGE, sww_precision,
[4455]5172                                   ('numbers_in_range',))
[4699]5173
5174            # Initialise ranges with small and large sentinels.
5175            # If this was in pure Python we could have used None sensibly
[4862]5176            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
[4699]5177            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
[4455]5178           
[4541]5179        if type(times) is list or type(times) is ArrayType: 
5180            outfile.variables['time'][:] = times    #Store time relative
5181           
[4455]5182        if verbose:
5183            print '------------------------------------------------'
5184            print 'Statistics:'
5185            print '    t in [%f, %f], len(t) == %d'\
5186                  %(min(times.flat), max(times.flat), len(times.flat))
[4268]5187
[4455]5188       
[4704]5189    def store_triangulation(self,
5190                            outfile,
5191                            points_utm,
5192                            volumes,
5193                            elevation, zone=None, new_origin=None, 
5194                            points_georeference=None, verbose=False):
[4455]5195        """
5196       
5197        new_origin - a georeference that the points can be set to. (Maybe
5198        do this before calling this function.)
5199       
5200        points_utm - currently a list or array of the points in UTM.
5201        points_georeference - the georeference of the points_utm
5202       
5203        How about passing new_origin and current_origin.
5204        If you get both, do a conversion from the old to the new.
5205       
[4665]5206        If you only get new_origin, the points are absolute,
5207        convert to relative
[4455]5208       
5209        if you only get the current_origin the points are relative, store
5210        as relative.
5211       
5212        if you get no georefs create a new georef based on the minimums of
5213        points_utm.  (Another option would be to default to absolute)
5214       
5215        Yes, and this is done in another part of the code.
5216        Probably geospatial.
5217       
5218        If you don't supply either geo_refs, then supply a zone. If not
5219        the default zone will be used.
5220       
5221       
5222        precondition:
5223       
5224        header has been called.
5225        """
5226       
5227        number_of_points = len(points_utm)   
[4665]5228        volumes = array(volumes) 
5229        points_utm = array(points_utm)
[4268]5230
[4455]5231        # given the two geo_refs and the points, do the stuff
5232        # described in the method header
5233        # if this is needed else where, pull out as a function
5234        points_georeference = ensure_geo_reference(points_georeference)
5235        new_origin = ensure_geo_reference(new_origin)
5236        if new_origin is None and points_georeference is not None:
5237            points = points_utm
5238            geo_ref = points_georeference
5239        else:
5240            if new_origin is None:
5241                new_origin = Geo_reference(zone,min(points_utm[:,0]),
5242                                           min(points_utm[:,1]))
5243            points = new_origin.change_points_geo_ref(points_utm,
5244                                                      points_georeference)
5245            geo_ref = new_origin
[4268]5246
[4455]5247        # At this stage I need a georef and points
5248        # the points are relative to the georef
5249        geo_ref.write_NetCDF(outfile)
[4280]5250   
[4455]5251        # This will put the geo ref in the middle
[4665]5252        #geo_ref=Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
5253       
[4455]5254        x =  points[:,0]
5255        y =  points[:,1]
5256        z = outfile.variables['z'][:]
5257   
5258        if verbose:
5259            print '------------------------------------------------'
5260            print 'More Statistics:'
5261            print '  Extent (/lon):'
5262            print '    x in [%f, %f], len(lat) == %d'\
5263                  %(min(x), max(x),
5264                    len(x))
5265            print '    y in [%f, %f], len(lon) == %d'\
5266                  %(min(y), max(y),
5267                    len(y))
5268            print '    z in [%f, %f], len(z) == %d'\
5269                  %(min(elevation), max(elevation),
5270                    len(elevation))
5271            print 'geo_ref: ',geo_ref
5272            print '------------------------------------------------'
5273           
5274        #z = resize(bath_grid,outfile.variables['z'][:].shape)
[4862]5275        #print "points[:,0]", points[:,0]
[4455]5276        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
5277        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
5278        outfile.variables['z'][:] = elevation
5279        outfile.variables['elevation'][:] = elevation  #FIXME HACK
5280        outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
[4268]5281
[4455]5282        q = 'elevation'
5283        # This updates the _range values
5284        outfile.variables[q+Write_sww.RANGE][0] = min(elevation)
5285        outfile.variables[q+Write_sww.RANGE][1] = max(elevation)
[4280]5286
[4704]5287
[4862]5288    def store_quantities(self, outfile, sww_precision=Float32,
[4704]5289                         slice_index=None, time=None,
5290                         verbose=False, **quant):
[4455]5291        """
5292        Write the quantity info.
[4280]5293
[4455]5294        **quant is extra keyword arguments passed in. These must be
5295          the sww quantities, currently; stage, xmomentum, ymomentum.
5296       
5297        if the time array has already been built, use the slice_index
5298        to specify the index.
5299       
5300        Otherwise, use time to increase the time dimension
[4280]5301
[4455]5302        Maybe make this general, but the viewer assumes these quantities,
5303        so maybe we don't want it general - unless the viewer is general
5304       
5305        precondition:
5306        triangulation and
5307        header have been called.
5308        """
[4280]5309
[4455]5310        if time is not None:
5311            file_time = outfile.variables['time']
5312            slice_index = len(file_time)
5313            file_time[slice_index] = time   
5314
[4938]5315        # Write the conserved quantities from Domain.
[4455]5316        # Typically stage,  xmomentum, ymomentum
5317        # other quantities will be ignored, silently.
[4938]5318        # Also write the ranges: stage_range,
5319        # xmomentum_range and ymomentum_range
[4455]5320        for q in Write_sww.sww_quantities:
5321            if not quant.has_key(q):
5322                msg = 'SWW file can not write quantity %s' %q
5323                raise NewQuantity, msg
5324            else:
5325                q_values = quant[q]
[4862]5326                outfile.variables[q][slice_index] = \
5327                                q_values.astype(sww_precision)
[4455]5328
5329                # This updates the _range values
5330                q_range = outfile.variables[q+Write_sww.RANGE][:]
5331                q_values_min = min(q_values)
5332                if q_values_min < q_range[0]:
5333                    outfile.variables[q+Write_sww.RANGE][0] = q_values_min
5334                q_values_max = max(q_values)
5335                if q_values_max > q_range[1]:
5336                    outfile.variables[q+Write_sww.RANGE][1] = q_values_max
5337
5338    def verbose_quantities(self, outfile):
[4280]5339        print '------------------------------------------------'
5340        print 'More Statistics:'
[4455]5341        for q in Write_sww.sww_quantities:
[4699]5342            print '    %s in [%f, %f]' %(q,
5343                                       outfile.variables[q+Write_sww.RANGE][0],
5344                                       outfile.variables[q+Write_sww.RANGE][1])
[4280]5345        print '------------------------------------------------'
[4699]5346
[4704]5347
[4455]5348       
5349def obsolete_write_sww_time_slices(outfile, has, uas, vas, elevation,
[4280]5350                         mean_stage=0, zscale=1,
5351                         verbose=False):   
5352    #Time stepping
5353    stage = outfile.variables['stage']
5354    xmomentum = outfile.variables['xmomentum']
5355    ymomentum = outfile.variables['ymomentum']
5356
5357    n = len(has)
5358    j=0
5359    for ha, ua, va in map(None, has, uas, vas):
5360        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
5361        w = zscale*ha + mean_stage
5362        stage[j] = w
5363        h = w - elevation
5364        xmomentum[j] = ua*h
[4348]5365        ymomentum[j] = -1*va*h #  -1 since in mux files south is positive.
[4280]5366        j += 1
[4455]5367   
[4303]5368def urs2txt(basename_in, location_index=None):
[4301]5369    """
5370    Not finished or tested
5371    """
[4378]5372   
5373    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
5374                basename_in + EAST_VELOCITY_LABEL,
5375                basename_in + NORTH_VELOCITY_LABEL]
[4301]5376    quantities = ['HA','UA','VA']
5377
[4303]5378    d = ","
5379   
[4301]5380    # Instantiate Urs_points for the three mux files.
5381    mux = {}
5382    for quantity, file in map(None, quantities, files_in):
5383        mux[quantity] = Urs_points(file)
[4280]5384       
[4301]5385    # Could check that the depth is the same. (hashing)
5386
5387    # handle to a mux file to do depth stuff
5388    a_mux = mux[quantities[0]]
5389   
5390    # Convert to utm
[4303]5391    latitudes = a_mux.lonlatdep[:,1]
5392    longitudes = a_mux.lonlatdep[:,0]
[4301]5393    points_utm, zone = convert_from_latlon_to_utm( \
[4303]5394        latitudes=latitudes, longitudes=longitudes)
[4301]5395    #print "points_utm", points_utm
5396    #print "zone", zone
[4303]5397    depths = a_mux.lonlatdep[:,2]  #
[4301]5398   
5399    fid = open(basename_in + '.txt', 'w')
5400
[4303]5401    fid.write("zone: " + str(zone) + "\n")
[4301]5402
[4303]5403    if location_index is not None:
5404        #Title
5405        li = location_index
5406        fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
5407                  'Northing' + "\n")
5408        fid.write(str(li) +d+ str(latitudes[li])+d+ \
5409              str(longitudes[li]) +d+ str(points_utm[li][0]) +d+ \
5410              str(points_utm[li][1]) + "\n")
5411
5412    # the non-time dependent stuff
5413    #Title
5414    fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
[5403]5415              'Northing' +d+ 'depth m' + "\n")
[4303]5416    i = 0
5417    for depth, point_utm, lat, long in map(None, depths,
5418                                               points_utm, latitudes,
5419                                               longitudes):
5420       
5421        fid.write(str(i) +d+ str(lat)+d+ str(long) +d+ str(point_utm[0]) +d+ \
5422                  str(point_utm[1]) +d+ str(depth) + "\n")
5423        i +=1
5424    #Time dependent
5425    if location_index is not None:
5426        time_step = a_mux.time_step
5427        i = 0
5428        #Title
5429        fid.write('time' +d+ 'HA depth m'+d+ \
[4665]5430                 'UA momentum East x m/sec' +d+ 'VA momentum North y m/sec' \
[4303]5431                      + "\n")
5432        for HA, UA, VA in map(None, mux['HA'], mux['UA'], mux['VA']):
5433            fid.write(str(i*time_step) +d+ str(HA[location_index])+d+ \
5434                      str(UA[location_index]) +d+ str(VA[location_index]) \
5435                      + "\n")
5436           
5437            i +=1
[5347]5438
5439class Write_sts:
5440
5441    sts_quantities = ['stage','xmomentum','ymomentum']
5442
5443
5444    RANGE = '_range'
5445    EXTREMA = ':extrema'
[4301]5446   
[5347]5447    def __init__(self):
5448        pass
5449
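    # Typical usage (illustrative sketch only; outfile is an open NetCDFFile
    # and the arrays are assumptions):
    #
    #     sts = Write_sts()
    #     sts.store_header(outfile, times, number_of_points)
    #     sts.store_points(outfile, points_utm, elevation, zone,
    #                      new_origin=origin)
    #     sts.store_quantities(outfile, slice_index=0, stage=stage,
    #                          xmomentum=xmomentum, ymomentum=ymomentum)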
5450    def store_header(self,
5451                     outfile,
5452                     times,
5453                     number_of_points,
5454                     description='Converted from URS mux2 format',
5455                     sts_precision=Float32,
5456                     verbose=False):
5457        """
5458        outfile - handle to the open NetCDF file that will be written to
5459        times - A list of the time slice times OR a start time
5460        Note, if a list is given the info will be made relative.
5461        number_of_points - the number of urs gauges sites
5462        """
5463
5464        outfile.institution = 'Geoscience Australia'
5465        outfile.description = description
5466       
5467        try:
5468            revision_number = get_revision_number()
5469        except:
5470            revision_number = None
5471        # Allow None to be stored as a string               
5472        outfile.revision_number = str(revision_number) 
5473       
5474        # times - A list or array of the time slice times OR a start time
5475        # Start time in seconds since the epoch (midnight 1/1/1970)
5476
5477        # This is used to separate a single start time (a number) from
5478        # a list/array of time slice times.
5479        if type(times) is list or type(times) is ArrayType: 
5480            number_of_times = len(times)
5481            times = ensure_numeric(times) 
5482            if number_of_times == 0:
5483                starttime = 0
5484            else:
5485                starttime = times[0]
5486                times = times - starttime  #Store relative times
5487        else:
5488            number_of_times = 0
5489            starttime = times
5490
5491        outfile.starttime = starttime
5492
5493        # Dimension definitions
5494        outfile.createDimension('number_of_points', number_of_points)
5495        outfile.createDimension('number_of_timesteps', number_of_times)
5496        outfile.createDimension('numbers_in_range', 2)
5497
5498        # Variable definitions
5499        outfile.createVariable('x', sts_precision, ('number_of_points',))
5500        outfile.createVariable('y', sts_precision, ('number_of_points',))
5501        outfile.createVariable('elevation', sts_precision,('number_of_points',))
5502
5503        q = 'elevation'
5504        outfile.createVariable(q+Write_sts.RANGE, sts_precision,
5505                               ('numbers_in_range',))
5506
5507        # Initialise ranges with small and large sentinels.
5508        # If this was in pure Python we could have used None sensibly
5509        outfile.variables[q+Write_sts.RANGE][0] = max_float  # Min
5510        outfile.variables[q+Write_sts.RANGE][1] = -max_float # Max
5511
5512        outfile.createVariable('z', sts_precision, ('number_of_points',))
5513        # Doing sts_precision instead of Float gives cast errors.
5514        outfile.createVariable('time', Float, ('number_of_timesteps',))
5515
5516        for q in Write_sts.sts_quantities:
5517            outfile.createVariable(q, sts_precision,
5518                                   ('number_of_timesteps',
5519                                    'number_of_points'))
5520            outfile.createVariable(q+Write_sts.RANGE, sts_precision,
5521                                   ('numbers_in_range',))
5522            # Initialise ranges with small and large sentinels.
5523            # If this was in pure Python we could have used None sensibly
5524            outfile.variables[q+Write_sts.RANGE][0] = max_float  # Min
5525            outfile.variables[q+Write_sts.RANGE][1] = -max_float # Max
5526
5527        if type(times) is list or type(times) is ArrayType: 
5528            outfile.variables['time'][:] = times    #Store time relative
5529
5530        if verbose:
5531            print '------------------------------------------------'
5532            print 'Statistics:'
5533            print '    t in [%f, %f], len(t) == %d'\
5534                  %(min(times.flat), max(times.flat), len(times.flat))
5535
5536    def store_points(self,
5537                     outfile,
5538                     points_utm,
5539                     elevation, zone=None, new_origin=None, 
5540                     points_georeference=None, verbose=False):
5541
5542        """
5543        points_utm - currently a list or array of the points in UTM.
5544        points_georeference - the georeference of the points_utm
5545
5546        How about passing new_origin and current_origin.
5547        If you get both, do a conversion from the old to the new.
5548       
5549        If you only get new_origin, the points are absolute,
5550        convert to relative
5551       
5552        if you only get the current_origin the points are relative, store
5553        as relative.
5554       
5555        if you get no georefs create a new georef based on the minimums of
5556        points_utm.  (Another option would be to default to absolute)
5557       
5558        Yes, and this is done in another part of the code.
5559        Probably geospatial.
5560       
5561        If you don't supply either geo_refs, then supply a zone. If not
5562        the default zone will be used.
5563
5564        precondition:
5565             header has been called.
5566        """
5567
5568        number_of_points = len(points_utm)
5569        points_utm = array(points_utm)
5570
5571        # given the two geo_refs and the points, do the stuff
5572        # described in the method header
5573        points_georeference = ensure_geo_reference(points_georeference)
5574        new_origin = ensure_geo_reference(new_origin)
5575       
5576        if new_origin is None and points_georeference is not None:
5577            points = points_utm
5578            geo_ref = points_georeference
5579        else:
5580            if new_origin is None:
5581                new_origin = Geo_reference(zone,min(points_utm[:,0]),
5582                                           min(points_utm[:,1]))
5583            points = new_origin.change_points_geo_ref(points_utm,
5584                                                      points_georeference)
5585            geo_ref = new_origin
5586
5587        # At this stage I need a georef and points
5588        # the points are relative to the georef
5589        geo_ref.write_NetCDF(outfile)
5590
5591        x =  points[:,0]
5592        y =  points[:,1]
5593        z = outfile.variables['z'][:]
5594   
5595        if verbose:
5596            print '------------------------------------------------'
5597            print 'More Statistics:'
5598            print '  Extent (x/y):'
5599            print '    x in [%f, %f], len(x) == %d'\
5600                  %(min(x), max(x),
5601                    len(x))
5602            print '    y in [%f, %f], len(y) == %d'\
5603                  %(min(y), max(y),
5604                    len(y))
5605            print '    z in [%f, %f], len(z) == %d'\
5606                  %(min(elevation), max(elevation),
5607                    len(elevation))
5608            print 'geo_ref: ',geo_ref
5609            print '------------------------------------------------'
5610           
5611        #z = resize(bath_grid,outfile.variables['z'][:].shape)
5612        #print "points[:,0]", points[:,0]
5613        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
5614        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
5615        outfile.variables['z'][:] = elevation
5616        outfile.variables['elevation'][:] = elevation  #FIXME HACK4
5617
5618        q = 'elevation'
5619        # This updates the _range values
5620        outfile.variables[q+Write_sts.RANGE][0] = min(elevation)
5621        outfile.variables[q+Write_sts.RANGE][1] = max(elevation)
5622
5623    def store_quantities(self, outfile, sts_precision=Float32,
5624                         slice_index=None, time=None,
5625                         verbose=False, **quant):
5626       
5627        """
5628        Write the quantity info.
5629
5630        **quant is extra keyword arguments passed in. These must be
5631          the sts quantities, currently; stage.
5632       
5633        if the time array has already been built, use the slice_index
5634        to specify the index.
5635       
5636        Otherwise, use time to increase the time dimension
5637
5638        Maybe make this general, but the viewer assumes these quantities,
5639        so maybe we don't want it general - unless the viewer is general
5640       
5641        precondition:
5642            triangulation and
5643            header have been called.
5644        """
5645        if time is not None:
5646            file_time = outfile.variables['time']
5647            slice_index = len(file_time)
5648            file_time[slice_index] = time   
5649
5650        # Write the conserved quantities from Domain.
5651        # Typically stage,  xmomentum, ymomentum
5652        # other quantities will be ignored, silently.
5653        # Also write the ranges: stage_range
5654        for q in Write_sts.sts_quantities:
5655            if not quant.has_key(q):
5656                msg = 'STS file can not write quantity %s' %q
5657                raise NewQuantity, msg
5658            else:
5659                q_values = quant[q]
5660                outfile.variables[q][slice_index] = \
5661                                q_values.astype(sts_precision)
5662
5663                # This updates the _range values
5664                q_range = outfile.variables[q+Write_sts.RANGE][:]
5665                q_values_min = min(q_values)
5666                if q_values_min < q_range[0]:
5667                    outfile.variables[q+Write_sts.RANGE][0] = q_values_min
5668                q_values_max = max(q_values)
5669                if q_values_max > q_range[1]:
5670                    outfile.variables[q+Write_sts.RANGE][1] = q_values_max
5671
5672
5673   
[4271]5674class Urs_points:
5675    """
5676    Read the info in URS mux files.
[4268]5677
[4271]5678    For the quantities, here's how the file names correspond to
5679    what they contain:
5680    z-mux is height above sea level, m
5681    e-mux is velocity in the easterly direction, m/s
[4280]5682    n-mux is velocity in the northerly direction, m/s   
[4271]5683    """
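    # Example usage (illustrative sketch only; the file name is hypothetical):
    #
    #     mux = Urs_points('o_waveheight-z-mux')
    #     for hz in mux:       # one array of station values per time slice
    #         print hz[0]      # value at the first station
    #
    # Note an instance can only be iterated over once.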
[4268]5684    def __init__(self,urs_file):
[4271]5685        self.iterated = False
[4268]5686        columns = 3 # long, lat , depth
5687        mux_file = open(urs_file, 'rb')
5688       
5689        # Number of points/stations
5690        (self.points_num,)= unpack('i',mux_file.read(4))
5691       
5692        # nt, int - Number of time steps
5693        (self.time_step_count,)= unpack('i',mux_file.read(4))
[4280]5694        #print "self.time_step_count", self.time_step_count
[4268]5695        #dt, float - time step, seconds
5696        (self.time_step,) = unpack('f', mux_file.read(4))
[4280]5697        #print "self.time_step", self.time_step
[4268]5698        msg = "Bad data in the urs file."
5699        if self.points_num < 0:
5700            mux_file.close()
5701            raise ANUGAError, msg
5702        if self.time_step_count < 0:
5703            mux_file.close()
5704            raise ANUGAError, msg
5705        if self.time_step < 0:
5706            mux_file.close()
5707            raise ANUGAError, msg
[4271]5708
5709        # the depth is in meters, and is the distance from the ocean
5710        # surface to the sea bottom.
[4268]5711        lonlatdep = p_array.array('f')
5712        lonlatdep.read(mux_file, columns * self.points_num)
5713        lonlatdep = array(lonlatdep, typecode=Float)   
5714        lonlatdep = reshape(lonlatdep, (self.points_num, columns))
5715        #print 'lonlatdep',lonlatdep
5716        self.lonlatdep = lonlatdep
[4271]5717       
5718        self.mux_file = mux_file
[4268]5719        # check this array
5720
5721    def __iter__(self):
5722        """
[4271]5723        Iterate over the quantity data, which is ordered with respect to time.
5724
5725        Note: You can only iterate once over an object.
5726       
5727        Returns quantity information for each time slice.
[4268]5728        """
[4271]5729        msg =  "You can only iterate once over a urs file."
5730        assert not self.iterated, msg
[4280]5731        self.iter_time_step = 0
[4271]5732        self.iterated = True
[4268]5733        return self
5734   
5735    def next(self):
[4280]5736        if self.time_step_count == self.iter_time_step:
[4271]5737            self.close()
[4268]5738            raise StopIteration
5739        #Read in a time slice  from mux file 
5740        hz_p_array = p_array.array('f')
[4271]5741        hz_p_array.read(self.mux_file, self.points_num)
[4268]5742        hz_p = array(hz_p_array, typecode=Float)
[4280]5743        self.iter_time_step += 1
[4271]5744       
5745        return hz_p
[4268]5746
5747    def close(self):
5748        self.mux_file.close()
[4500]5749       
[4223]5750    #### END URS UNGRIDDED 2 SWW ###
5751
[4500]5752       
[5070]5753def start_screen_catcher(dir_name=None, myid='', numprocs='', extra_info='',
[4924]5754                         verbose=True):
[5070]5755    """
[4500]5756    Used to store screen output and errors to file. If run on multiple
5757    processes, each processor will have its own output and error file.
5758   
5759    extra_info - is used as a string that can identify outputs with another
5760    string eg. '_other'
[4850]5761   
5762    FIXME: Would be good if you could suppress all the screen output and
[5070]5763    only save it to file... however it seems a bit tricky as this capture
[4850]5764    technique responds to sys.stdout and by this time it is already printed out.
[5070]5765    """
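    # Example usage (illustrative sketch only; the directory is hypothetical):
    #
    #     start_screen_catcher('output_dir', myid=0, numprocs=4)
    #
    # After this call anything printed to stdout/stderr is appended to
    # output_dir/screen_output_0_4.txt and output_dir/screen_error_0_4.txt.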
[4850]5766   
[4500]5767    import sys
[5070]5768#    dir_name = dir_name
5769    if dir_name == None:
5770        dir_name=getcwd()
5771       
[4500]5772    if access(dir_name,W_OK) == 0:
[4850]5773        if verbose: print 'Making directory %s' %dir_name
5774      #  if verbose: print "myid", myid
[4500]5775        mkdir (dir_name,0777)
[5070]5776
[4500]5777    if myid <>'':
5778        myid = '_'+str(myid)
5779    if numprocs <>'':
5780        numprocs = '_'+str(numprocs)
5781    if extra_info <>'':
5782        extra_info = '_'+str(extra_info)
[4924]5783#    print 'hello1'
[5070]5784    screen_output_name = join(dir_name, "screen_output%s%s%s.txt" %(myid,
[4665]5785                                                                numprocs,
[5070]5786                                                                extra_info))
5787    screen_error_name = join(dir_name,  "screen_error%s%s%s.txt" %(myid,
[4665]5788                                                              numprocs,
[5070]5789                                                              extra_info))
[4924]5790
5791    if verbose: print 'Starting ScreenCatcher, all output will be stored in %s' \
5792                                     %(screen_output_name)
[4500]5793    #used to catch screen output to file
5794    sys.stdout = Screen_Catcher(screen_output_name)
5795    sys.stderr = Screen_Catcher(screen_error_name)
5796
5797class Screen_Catcher:
5798    """this simply catches the screen output and stores it to file defined by
5799    start_screen_catcher (above)
5800    """
5801   
5802    def __init__(self, filename):
5803        self.filename = filename
[4924]5804#        print 'init'
[4500]5805        if exists(self.filename) is True:
5806            print 'Old existing file "%s" has been deleted' %(self.filename)
5807            remove(self.filename)
5808
5809    def write(self, stuff):
5810        fid = open(self.filename, 'a')
5811        fid.write(stuff)
[4924]5812        fid.close()
5813       
[5391]5814# FIXME (DSG): Add unit test, make general, not just 2 files,
5815# but any number of files.
[4850]5816def copy_code_files(dir_name, filename1, filename2=None):
[4500]5817    """Copies "filename1" and "filename2" to "dir_name". Very useful for
5818    information management
5819    filename1 and filename2 are both absolute pathnames   
5820    """
5821
5822    if access(dir_name,F_OK) == 0:
5823        print 'Make directory %s' %dir_name
5824        mkdir (dir_name,0777)
5825    shutil.copy(filename1, dir_name + sep + basename(filename1))
[4850]5826    if filename2!=None:
5827        shutil.copy(filename2, dir_name + sep + basename(filename2))
5828        print 'Files %s and %s copied' %(filename1, filename2)
5829    else:
5830        print 'File %s copied' %(filename1)
[4500]5831
5832def get_data_from_file(filename,separator_value = ','):
5833    """
5834    Read in data information from file and
5835   
5836    Returns:
5837        header_fields, a string? of the first line separated
5838        by the 'separator_value'
5839       
5840        data, a array (N data columns X M lines) in the file
5841        excluding the header
5842       
5843    NOTE: wont deal with columns with different lenghts and there must be
5844    no blank lines at the end.
5845    """
5846   
5847    fid = open(filename)
5848    lines = fid.readlines()
5849   
5850    fid.close()
5851   
5852    header_line = lines[0]
5853    header_fields = header_line.split(separator_value)
5854
5855    #array to store data, number in there is to allow float...
5856    #i'm sure there is a better way!
5857    data=array([],typecode=Float)
5858    data=resize(data,((len(lines)-1),len(header_fields)))
5859#    print 'number of fields',range(len(header_fields))
5860#    print 'number of lines',len(lines), shape(data)
5861#    print'data',data[1,1],header_line
5862
5863    array_number = 0
5864    line_number = 1
5865    while line_number < (len(lines)):
5866        for i in range(len(header_fields)): 
5867            #this get line below the header, explaining the +1
5868            #and also the line_number can be used as the array index
5869            fields = lines[line_number].split(separator_value)
5870            #assign to array
5871            data[array_number,i] = float(fields[i])
5872           
5873        line_number = line_number +1
5874        array_number = array_number +1
5875       
5876    return header_fields, data
5877
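# Example (illustrative sketch only; the file name and contents are made up):
# for a file 'gauges.csv' containing
#
#     time,stage,speed
#     0.0,1.2,0.3
#     1.0,1.4,0.5
#
# get_data_from_file('gauges.csv') returns the header fields
# ['time', 'stage', 'speed\n'] and a 2x3 array of the numeric values.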
5878def store_parameters(verbose=False,**kwargs):
5879    """
[4519]5880    Store "kwargs" in a csv file. If "completed" is in kwargs, the csv file is
5881    kwargs['file_name'], else it is kwargs['output_dir'] + 'detail_temp.csv'
5882   
[4500]5883    Must have a file_name keyword arg; this is what it writes to.
5884    There might be a better way to do this using the CSV module Writer and writeDict.
5885   
[4665]5886    writes file to "output_dir" unless "completed" is in kwargs, then
5887    it writes to "file_name" kwargs
[4519]5888
[4500]5889    """
5890    import types
[4595]5891#    import os
[4500]5892   
5893    # Check that kwargs is a dictionary
5894    if type(kwargs) != types.DictType:
5895        raise TypeError
5896   
[4519]5897    # is 'completed' in kwargs?
[4500]5898    try:
5899        kwargs['completed']
5900        completed=True
5901    except:
5902        completed=False
5903 
5904    #get file name and removes from dict and assert that a file_name exists
5905    if completed:
5906        try:
[4519]5907            file = str(kwargs['file_name'])
[4500]5908        except:
5909            raise 'kwargs must have file_name'
5910    else:
[4519]5911        #write temp file in output directory
[4500]5912        try:
[4519]5913            file = str(kwargs['output_dir'])+'detail_temp.csv'
[4500]5914        except:
5915            raise 'kwargs must have output_dir'
5916       
5917    #extracts the header info and the new line info
5918    line=''
5919    header=''
5920    count=0
5921    keys = kwargs.keys()
5922    keys.sort()
5923   
5924    #used the sorted keys to create the header and line data
5925    for k in keys:
[4519]5926#        print "%s = %s" %(k, kwargs[k])
[4500]5927        header = header+str(k)
5928        line = line+str(kwargs[k])
5929        count+=1
5930        if count <len(kwargs):
5931            header = header+','
5932            line = line+','
[4519]5933    header+='\n'
5934    line+='\n'
[4500]5935
    # Check the header info: if it is the same, append; if not, create a new file
    # Try to open the file for reading
    try:
        fid = open(file, "r")
        file_header = fid.readline()
        fid.close()
        if verbose: print 'read file header %s' %file_header

    except:
        msg = 'could not read header, so creating a new file %s' %file
        if verbose: print msg
        # Try to create the file; the directory may be bad
        try:
            fid = open(file, "w")
            fid.write(header)
            fid.close()
            file_header = header
        except:
            msg = 'cannot create new file %s' %file
            raise Exception(msg)
5956           
    # If the header is the same, or this is a new file, append the new line
    if file_header == str(header):
        fid = open(file, "a")
        fid.write(line)
        fid.close()
    else:
        # Backup plan:
        # if the header is different, append both header and line to the
        # detail_temp.csv file in the output directory
        file = str(kwargs['output_dir']) + 'detail_temp.csv'
        fid = open(file, "a")
        fid.write(header)
        fid.write(line)
        fid.close()
        if verbose: print 'file', file_header.strip('\n')
        if verbose: print 'head', header.strip('\n')
        msg = 'WARNING: File header does not match input info; '
        msg += 'the input variables have changed. Consider changing the file name.'
        print msg
5977
5978
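# Example (hedged sketch): typical use of store_parameters.  The keyword names
# and directories below are hypothetical, not fixed by this module.
#
#   store_parameters(verbose=False,
#                    output_dir='results/',      # appended to results/detail_temp.csv
#                    run_name='test_run',
#                    cellsize=10.0)
#
#   # Once the run has finished, write to the final file instead:
#   store_parameters(verbose=False,
#                    completed=True,
#                    file_name='results/details.csv',
#                    run_name='test_run',
#                    cellsize=10.0)
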
[4551]5979
[5226]5980# ----------------------------------------------
5981# Functions to obtain diagnostics from sww files
5982#-----------------------------------------------
[4551]5983
[5276]5984def get_mesh_and_quantities_from_file(filename,
5985                                      quantities=None,
5986                                      verbose=False):
    """Get and rebuild the mesh structure and associated quantities from an sww file.

    Input:
        filename - Name of sww file
        quantities - Names of quantities to load (currently not used; elevation,
                     stage, xmomentum and ymomentum are always returned)

    Output:
        mesh - instance of class Mesh
               (from anuga.abstract_2d_finite_volumes.neighbour_mesh)
        quantities - dictionary of arrays with quantity values at each mesh node
        time - vector of stored timesteps
    """
[5276]5999 
6000    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.
6001   
[5226]6002    import types
[5276]6003    from Scientific.IO.NetCDF import NetCDFFile
6004    from shallow_water import Domain
6005    from Numeric import asarray, transpose, resize
6006    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
[5226]6007
[5276]6008    if verbose: print 'Reading from ', filename
6009    fid = NetCDFFile(filename, 'r')    # Open existing file for read
[5307]6010    time = fid.variables['time'][:]    # Time vector
[5276]6011    time += fid.starttime[0]
[5226]6012   
[5276]6013    # Get the variables as Numeric arrays
6014    x = fid.variables['x'][:]                   # x-coordinates of nodes
6015    y = fid.variables['y'][:]                   # y-coordinates of nodes
6016    elevation = fid.variables['elevation'][:]   # Elevation
6017    stage = fid.variables['stage'][:]           # Water level
6018    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
6019    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction
[5226]6020
6021
[5276]6022    # Mesh (nodes (Mx2), triangles (Nx3))
6023    nodes = concatenate( (x[:, NewAxis], y[:, NewAxis]), axis=1 )
6024    triangles = fid.variables['volumes'][:]
6025   
6026    # Get geo_reference
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except AttributeError:
        # Sww files don't have to have a geo_ref
        geo_reference = None
[5226]6032
[5276]6033    if verbose: print '    building mesh from sww file %s' %filename
6034    boundary = None
[5226]6035
[5276]6036    #FIXME (Peter Row): Should this be in mesh?
6037    if fid.smoothing != 'Yes':
6038        nodes = nodes.tolist()
6039        triangles = triangles.tolist()
6040        nodes, triangles, boundary=weed(nodes, triangles, boundary)
[5226]6041
6042
6043    try:
[5276]6044        mesh = Mesh(nodes, triangles, boundary,
6045                    geo_reference=geo_reference)
6046    except AssertionError, e:
6047        fid.close()
        msg = 'Domain could not be created: %s' %e
6049        raise DataDomainError, msg
[5226]6050
6051
6052    quantities = {}
[5276]6053    quantities['elevation'] = elevation
6054    quantities['stage'] = stage   
6055    quantities['xmomentum'] = xmomentum
6056    quantities['ymomentum'] = ymomentum   
[5336]6057
6058    fid.close()
[5276]6059    return mesh, quantities, time
[5226]6060
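# Example (hedged sketch): read a mesh and its quantities back from an sww
# file.  The file name 'runup.sww' is hypothetical.
#
#   mesh, quantities, time = get_mesh_and_quantities_from_file('runup.sww',
#                                                              verbose=True)
#   stage = quantities['stage']      # shape: (number of timesteps, number of nodes)
#   final_stage = stage[-1, :]       # stage at every node at the last stored time
#   print 'stored times:', time
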
6061
6062
6063def get_flow_through_cross_section(filename,
6064                                   polyline,
6065                                   verbose=False):
[5276]6066    """Obtain flow (m^3/s) perpendicular to specified cross section.
[5226]6067
6068    Inputs:
6069        filename: Name of sww file
6070        polyline: Representation of desired cross section - it may contain multiple
[5288]6071                  sections allowing for complex shapes. Assume absolute UTM coordinates.
6072                  Format [[x0, y0], [x1, y1], ...]
[5226]6073
6074    Output:
[5288]6075        time: All stored times in sww file
[5276]6076        Q: Hydrograph of total flow across given segments for all stored times.
[5226]6077
    The normal flow is computed for each triangle intersected by the polyline
    and added up.  If multiple segments at different angles are specified, the
    normal flows may partially cancel each other.
[5226]6081
[5288]6082    The typical usage of this function would be to get flow through a channel,
6083    and the polyline would then be a cross section perpendicular to the flow.
6084
[5226]6085    """
6086
[5288]6087    from anuga.fit_interpolate.interpolate import Interpolation_function
6088
6089    quantity_names =['elevation',
6090                     'stage',
6091                     'xmomentum',
6092                     'ymomentum']
6093
[5226]6094    # Get mesh and quantities from sww file
[5276]6095    X = get_mesh_and_quantities_from_file(filename,
[5288]6096                                          quantities=quantity_names,
[5276]6097                                          verbose=verbose)
6098    mesh, quantities, time = X
[5226]6099
6100
[5288]6101    # Adjust polyline to mesh spatial origin
6102    polyline = mesh.geo_reference.get_relative(polyline)
[5226]6103   
6104    # Find all intersections and associated triangles.
[5321]6105    segments = mesh.get_intersecting_segments(polyline, verbose=verbose)
6109
[5226]6110   
6111    # Then store for each triangle the length of the intersecting segment(s),
[5288]6112    # right hand normal(s) and midpoints as interpolation_points.
6113    interpolation_points = []
6114    for segment in segments:
6115        midpoint = sum(array(segment.segment))/2
6116        interpolation_points.append(midpoint)
[5226]6117
[5288]6118
[5321]6119    if verbose:
6120        print 'Interpolating - ',       
6121        print 'total number of interpolation points = %d'\
6122              %len(interpolation_points)
6123
[5288]6124    I = Interpolation_function(time,
6125                               quantities,
6126                               quantity_names=quantity_names,
6127                               vertex_coordinates=mesh.nodes,
6128                               triangles=mesh.triangles,
6129                               interpolation_points=interpolation_points,
6130                               verbose=verbose)
6131
[5321]6132    if verbose: print 'Computing hydrograph'
[5288]6133    # Compute hydrograph
6134    Q = []
6135    for t in time:
6136        total_flow=0
6137        for i, p in enumerate(interpolation_points):
6138            elevation, stage, uh, vh = I(t, point_id=i)
6139            normal = segments[i].normal
6140
6141            # Inner product of momentum vector with segment normal [m^2/s]
6142            normal_momentum = uh*normal[0] + vh*normal[1] 
6143
6144            # Flow across this segment [m^3/s]
6145            segment_flow = normal_momentum*segments[i].length
6146
6147            # Accumulate
6148            total_flow += segment_flow
6149             
6150
6151        # Store flow at this timestep   
6152        Q.append(total_flow)
6153   
6154
6155
[5276]6156    return time, Q
6157   
[5226]6158
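# Example (hedged sketch): hydrograph through a channel cross section.  The
# file name and coordinates below are hypothetical.
#
#   cross_section = [[465000.0, 8650000.0], [465000.0, 8650300.0]]  # absolute UTM
#   time, Q = get_flow_through_cross_section('channel.sww',
#                                            cross_section,
#                                            verbose=True)
#   for t, q in zip(time, Q):
#       print '%.1f s: %.3f m^3/s' %(t, q)
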
6159
[5276]6160
6161
[4554]6162def get_maximum_inundation_elevation(filename,
6163                                     polygon=None,
6164                                     time_interval=None,
6165                                     verbose=False):
6166   
6167    """Return highest elevation where depth > 0
6168   
6169    Usage:
6170    max_runup = get_maximum_inundation_elevation(filename,
6171                                                 polygon=None,
6172                                                 time_interval=None,
6173                                                 verbose=False)
6174
[4665]6175    filename is a NetCDF sww file containing ANUGA model output.   
    The optional arguments polygon and time_interval restrict the maximum
    runup calculation to points that lie within the specified polygon and
    time interval.
6179
6180    If no inundation is found within polygon and time_interval the return value
6181    is None signifying "No Runup" or "Everything is dry".
6182
6183    See general function get_maximum_inundation_data for details.
6184   
6185    """
6186   
6187    runup, _ = get_maximum_inundation_data(filename,
6188                                           polygon=polygon,
6189                                           time_interval=time_interval,
6190                                           verbose=verbose)
6191    return runup
6192
6193
6194
6195
6196def get_maximum_inundation_location(filename,
6197                                    polygon=None,
6198                                    time_interval=None,
6199                                    verbose=False):
6200    """Return location of highest elevation where h > 0
6201   
6202   
6203    Usage:
6204    max_runup_location = get_maximum_inundation_location(filename,
6205                                                         polygon=None,
6206                                                         time_interval=None,
6207                                                         verbose=False)
6208
6209    filename is a NetCDF sww file containing ANUGA model output.
    The optional arguments polygon and time_interval restrict the maximum
    runup calculation to points that lie within the specified polygon and
    time interval.
6213
6214    If no inundation is found within polygon and time_interval the return value
6215    is None signifying "No Runup" or "Everything is dry".
6216
6217    See general function get_maximum_inundation_data for details.
6218    """
6219   
6220    _, max_loc = get_maximum_inundation_data(filename,
6221                                             polygon=polygon,
6222                                             time_interval=time_interval,
6223                                             verbose=verbose)
6224    return max_loc
6225   
6226
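# Example (hedged sketch): maximum runup and its location, optionally
# restricted to a polygon and a time window.  The file name, polygon and
# times below are hypothetical.
#
#   poly = [[465000., 8650000.], [465400., 8650000.],
#           [465400., 8650400.], [465000., 8650400.]]   # absolute UTM
#   runup = get_maximum_inundation_elevation('runup.sww',
#                                            polygon=poly,
#                                            time_interval=[0.0, 3600.0])
#   location = get_maximum_inundation_location('runup.sww',
#                                              polygon=poly,
#                                              time_interval=[0.0, 3600.0])
#   if runup is None:
#       print 'Everything within the polygon stayed dry'
#   else:
#       print 'Maximal runup of %.2f m at %s' %(runup, location)
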
6227
6228def get_maximum_inundation_data(filename, polygon=None, time_interval=None,
[4567]6229                                use_centroid_values=False,
[4554]6230                                verbose=False):
[4551]6231    """Compute maximum run up height from sww file.
6232
[4554]6233
6234    Usage:
6235    runup, location = get_maximum_inundation_data(filename,
6236                                                  polygon=None,
6237                                                  time_interval=None,
6238                                                  verbose=False)
6239   
6240
    The algorithm is as in get_maximum_inundation_elevation from
    shallow_water_domain, except that this function works with the sww file
    and computes the maximal runup height over multiple timesteps.

    The optional arguments polygon and time_interval restrict the maximum
    runup calculation to points that lie within the specified polygon and
    time interval.  The polygon is assumed to be in (absolute) UTM
    coordinates in the same zone as the domain.
[4554]6251
6252    If no inundation is found within polygon and time_interval the return value
6253    is None signifying "No Runup" or "Everything is dry".
[4551]6254    """
6255
6256    # We are using nodal values here as that is what is stored in sww files.
6257
6258    # Water depth below which it is considered to be 0 in the model
6259    # FIXME (Ole): Allow this to be specified as a keyword argument as well
[4554]6260
6261    from anuga.utilities.polygon import inside_polygon   
[4551]6262    from anuga.config import minimum_allowed_height
[4595]6263    from Scientific.IO.NetCDF import NetCDFFile
[4551]6264
[4595]6265    dir, base = os.path.split(filename)
6266           
6267    iterate_over = get_all_swwfiles(dir,base)
[4551]6268   
6269    # Read sww file
6270    if verbose: 
[4554]6271        print 'Reading from %s' %filename
[4563]6272        # FIXME: Use general swwstats (when done)
[4551]6273   
[4595]6274    maximal_runup = None
6275    maximal_runup_location = None
[4563]6276   
    for swwfile in iterate_over:
[4554]6278       
[4595]6279        # Read sww file
6280        filename = join(dir,swwfile+'.sww')
[4554]6281       
[4595]6282        if verbose: 
6283            print 'Reading from %s' %filename
6284            # FIXME: Use general swwstats (when done)
6285               
6286        fid = NetCDFFile(filename)
6287   
6288        # Get geo_reference
6289        # sww files don't have to have a geo_ref
6290        try:
6291            geo_reference = Geo_reference(NetCDFObject=fid)
6292        except AttributeError, e:
6293            geo_reference = Geo_reference() # Default georef object
6294           
6295        xllcorner = geo_reference.get_xllcorner()
6296        yllcorner = geo_reference.get_yllcorner()
6297        zone = geo_reference.get_zone()
[4554]6298       
[4595]6299        # Get extent
6300        volumes = fid.variables['volumes'][:]   
6301        x = fid.variables['x'][:] + xllcorner
6302        y = fid.variables['y'][:] + yllcorner
[4551]6303   
[4595]6304   
[4863]6305        # Get the relevant quantities (Convert from single precison)
6306        elevation = array(fid.variables['elevation'][:], Float) 
6307        stage = array(fid.variables['stage'][:], Float)
[4595]6308   
6309   
[4665]6310        # Here's where one could convert nodal information to centroid
6311        # information
[4595]6312        # but is probably something we need to write in C.
6313        # Here's a Python thought which is NOT finished!!!
[4567]6314        if use_centroid_values is True:
[4595]6315            x = get_centroid_values(x, volumes)
6316            y = get_centroid_values(y, volumes)   
6317            elevation = get_centroid_values(elevation, volumes)   
6318   
6319   
6320        # Spatial restriction
6321        if polygon is not None:
6322            msg = 'polygon must be a sequence of points.'
6323            assert len(polygon[0]) == 2, msg
[4665]6324            # FIXME (Ole): Make a generic polygon input check in polygon.py
6325            # and call it here
[4595]6326           
6327            points = concatenate((x[:,NewAxis], y[:,NewAxis]), axis=1)
6328   
6329            point_indices = inside_polygon(points, polygon)
6330   
6331            # Restrict quantities to polygon
6332            elevation = take(elevation, point_indices)
6333            stage = take(stage, point_indices, axis=1)
6334   
6335            # Get info for location of maximal runup
6336            points_in_polygon = take(points, point_indices)
6337            x = points_in_polygon[:,0]
6338            y = points_in_polygon[:,1]       
[4567]6339        else:
[4595]6340            # Take all points
6341            point_indices = arange(len(x))
[4567]6342           
[4551]6343   
[4595]6344        # Temporal restriction
6345        time = fid.variables['time'][:]
6346        all_timeindices = arange(len(time))       
6347        if time_interval is not None:
6348           
6349            msg = 'time_interval must be a sequence of length 2.'
6350            assert len(time_interval) == 2, msg
[4665]6351            msg = 'time_interval %s must not be decreasing.' %(time_interval)
[4595]6352            assert time_interval[1] >= time_interval[0], msg
6353           
6354            msg = 'Specified time interval [%.8f:%.8f]' %tuple(time_interval)
            msg += ' does not overlap model time interval: [%.8f, %.8f]\n'\
                   %(time[0], time[-1])
6357            if time_interval[1] < time[0]: raise ValueError(msg)
6358            if time_interval[0] > time[-1]: raise ValueError(msg)
6359   
6360            # Take time indices corresponding to interval (& is bitwise AND)
6361            timesteps = compress((time_interval[0] <= time) & (time <= time_interval[1]),
6362                                 all_timeindices)
6363   
6364   
            msg = 'time_interval %s did not include any model timesteps.' %(time_interval)
            assert len(timesteps) > 0, msg
6367   
6368   
6369        else:
6370            # Take them all
6371            timesteps = all_timeindices
6372       
6373   
6374        fid.close()
6375   
        # Compute maximal runup for each timestep
6381       
6382        for i in timesteps:
6383            if use_centroid_values is True:
[4665]6384                stage_i = get_centroid_values(stage[i,:], volumes)   
[4595]6385            else:
6386                stage_i = stage[i,:]
6387               
6388            depth = stage_i  - elevation
6389       
6390            # Get wet nodes i.e. nodes with depth>0 within given region and timesteps
6391            wet_nodes = compress(depth > minimum_allowed_height, arange(len(depth)))
6392   
6393            if alltrue(wet_nodes == 0):
6394                runup = None
6395            else:   
6396                # Find maximum elevation among wet nodes
6397                wet_elevation = take(elevation, wet_nodes)
6398   
6399                runup_index = argmax(wet_elevation)
6400                runup = max(wet_elevation)
                assert wet_elevation[runup_index] == runup # Must be True
6404           
[4595]6405            if runup > maximal_runup:
                maximal_runup = runup      # This works even if maximal_runup is None
6408   
6409                # Record location
6410                wet_x = take(x, wet_nodes)
6411                wet_y = take(y, wet_nodes)           
6412                maximal_runup_location = [wet_x[runup_index], wet_y[runup_index]]
6413   
[4554]6415    return maximal_runup, maximal_runup_location
[4551]6416
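# Example (hedged sketch): runup value and location from the underlying data
# function, using centroid values.  The file name is hypothetical.
#
#   runup, location = get_maximum_inundation_data('runup.sww',
#                                                 use_centroid_values=True,
#                                                 verbose=True)
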
def get_all_swwfiles(look_in_dir='', base_name='', verbose=False):
    '''
    Finds all the sww files in "look_in_dir" whose names contain "base_name".
    Accepts base_name with or without the extension ".sww".

    Returns: a list of strings (file names without the ".sww" extension)

    Usage:     iterate_over = get_all_swwfiles(dir, name)
    then
               for swwfile in iterate_over:
                   do stuff

    Check "export_grids" and "get_maximum_inundation_data" for examples
    '''
[4595]6431   
    # Split off and check the extension
    name, extension = os.path.splitext(base_name)

    if extension != '' and extension != '.sww':
        msg = 'file %s %s must be a NetCDF sww file!' %(base_name, extension)
        raise IOError, msg
6438
[4586]6439    if look_in_dir == "":
6440        look_in_dir = "." # Unix compatibility
6441   
6442    dir_ls = os.listdir(look_in_dir)
    iterate_over = [x[:-4] for x in dir_ls if name in x and x[-4:] == '.sww']
    if len(iterate_over) == 0:
        msg = 'No files of the base name %s in %s' %(name, look_in_dir)
[4586]6448        raise IOError, msg
6449    if verbose: print 'iterate over %s' %(iterate_over)
[4554]6450
[4586]6451    return iterate_over
6452
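# Example (hedged sketch): iterate over all sww files in a directory whose
# names contain a given base name.  The directory and base name are
# hypothetical.
#
#   for swwfile in get_all_swwfiles('results', 'channel'):
#       filename = join('results', swwfile + '.sww')
#       print 'Processing', filename
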
def get_all_files_with_extension(look_in_dir='', base_name='',
                                 extension='.sww', verbose=False):
    '''
    Finds all the files with the given "extension" in "look_in_dir" whose
    names contain "base_name".

    Returns: a list of strings (file names without the extension)

    Usage:     iterate_over = get_all_files_with_extension(dir, name, '.sww')
    then
               for filename in iterate_over:
                   do stuff

    Check "export_grids" and "get_maximum_inundation_data" for examples
    '''
6467   
    # Split off and check the extension
    name, ext = os.path.splitext(base_name)

    if ext != '' and ext != extension:
        msg = 'base_name %s must be a file with %s extension!' %(base_name, extension)
        raise IOError, msg
[4598]6475
    if look_in_dir == "":
        look_in_dir = "." # Unix compatibility
    dir_ls = os.listdir(look_in_dir)
    iterate_over = [os.path.splitext(x)[0] for x in dir_ls
                    if name in x and os.path.splitext(x)[1] == extension]
6482    if len(iterate_over) == 0:
6483        msg = 'No files of the base name %s in %s'\
6484              %(name, look_in_dir)
6485        raise IOError, msg
6486    if verbose: print 'iterate over %s' %(iterate_over)
6487
6488    return iterate_over
6489
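# Example (hedged sketch): find csv result files.  The directory, base name
# and extension below are hypothetical.
#
#   for name in get_all_files_with_extension('results', 'gauge', '.csv'):
#       print 'Found', join('results', name + '.csv')
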
def get_all_directories_with_name(look_in_dir='', base_name='', verbose=False):
    '''
    Finds all the directories in "look_in_dir" whose names contain "base_name".

    Returns: a list of strings

    Usage:     iterate_over = get_all_directories_with_name(dir, name)
    then
               for dirname in iterate_over:
                   do stuff

    Check "export_grids" and "get_maximum_inundation_data" for examples
    '''
6504   
    if look_in_dir == "":
        look_in_dir = "." # Unix compatibility
    dir_ls = os.listdir(look_in_dir)
    iterate_over = [x for x in dir_ls if base_name in x]
    if len(iterate_over) == 0:
        msg = 'No directories with the base name %s' %(base_name)
        raise IOError, msg
6517    if verbose: print 'iterate over %s' %(iterate_over)
6518
6519    return iterate_over
6520
def points2polygon(points_file,
                   minimum_triangle_angle=3.0):
    """
    WARNING: This function is not fully working.

    Return a polygon derived from the alpha shape of the points in the
    given points file.

    WARNING: The alpha shape may consist of multiple polygons, but this
    function only returns one of them.
    """
6531    from anuga.pmesh.mesh import Mesh, importMeshFromFile
6532    from anuga.shallow_water import Domain   
6533    from anuga.pmesh.mesh_interface import create_mesh_from_regions
6534   
6535    mesh = importMeshFromFile(points_file)
6536    mesh.auto_segment()
6537    mesh.exportASCIIsegmentoutlinefile("outline.tsh")
6538    mesh2 = importMeshFromFile("outline.tsh")
    mesh2.generate_mesh(maximum_triangle_area=1000000000,
                        minimum_triangle_angle=minimum_triangle_angle,
                        verbose=False)
6540    mesh2.export_mesh_file('outline_meshed.tsh')
6541    domain = Domain("outline_meshed.tsh", use_cache = False)
6542    polygon =  domain.get_boundary_polygon()
6543    return polygon
[4636]6544
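# Example (hedged sketch): derive an outline polygon from a points file.  The
# file name below is hypothetical and, as the docstring warns, the function is
# not fully working, so the result should be checked.
#
#   polygon = points2polygon('field_survey.pts', minimum_triangle_angle=3.0)
#   print 'Boundary polygon with %d vertices' %len(polygon)
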
[3292]6545#-------------------------------------------------------------
[4500]6546if __name__ == "__main__":
    # Set umask from config to force the same permissions for all files and
    # directories created.  (It was noticed that "mpirun" doesn't honour the
    # umask set in your .bashrc etc.)
6550    from config import umask
6551    import os 
6552    os.umask(umask)