source: anuga_core/source/anuga/shallow_water/data_manager.py @ 5409

Last change on this file since 5409 was 5409, checked in by kristy, 16 years ago
File size: 215.5 KB
[2852]1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
[4663]13.csv: ASCII format for storing arbitrary points and associated attributes
[2852]14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdini's ASCII geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
[3535]33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
[2852]34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST boundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
[3560]47TSH + Boundary SWW -> SWW: Simulation using abstract_2d_finite_volumes
[2852]48
49"""
50
51import exceptions
[3292]52class TitleValueError(exceptions.Exception): pass
[2852]53class DataMissingValuesError(exceptions.Exception): pass
54class DataFileNotOpenError(exceptions.Exception): pass
55class DataTimeError(exceptions.Exception): pass
56class DataDomainError(exceptions.Exception): pass
[4455]57class NewQuantity(exceptions.Exception): pass
[2852]58
59
60
[3292]61import csv
[4598]62import os, sys
[4500]63import shutil
[3720]64from struct import unpack
65import array as p_array
[4271]66#import time, os
[4595]67from os import sep, path, remove, mkdir, access, F_OK, W_OK, getcwd
[2852]68
[4595]69
[4868]70from Numeric import concatenate, array, Float, Int, Int32, resize, \
71     sometrue, searchsorted, zeros, allclose, around, reshape, \
72     transpose, sort, NewAxis, ArrayType, compress, take, arange, \
73     argmax, alltrue, shape, Float32, size
[4595]74
75import string
76
[3720]77from Scientific.IO.NetCDF import NetCDFFile
[4500]78#from shutil import copy
[4595]79from os.path import exists, basename, join
[4636]80from os import getcwd
[2852]81
82
[4271]83from anuga.coordinate_transforms.redfearn import redfearn, \
84     convert_from_latlon_to_utm
[4387]85from anuga.coordinate_transforms.geo_reference import Geo_reference, \
[4455]86     write_NetCDF_georeference, ensure_geo_reference
[4382]87from anuga.geospatial_data.geospatial_data import Geospatial_data,\
88     ensure_absolute
[3642]89from anuga.config import minimum_storable_height as default_minimum_storable_height
[4699]90from anuga.config import max_float
[4271]91from anuga.utilities.numerical_tools import ensure_numeric,  mean
[3720]92from anuga.caching.caching import myhash
93from anuga.utilities.anuga_exceptions import ANUGAError
[4271]94from anuga.shallow_water import Domain
95from anuga.abstract_2d_finite_volumes.pmesh2domain import \
96     pmesh_to_domain_instance
[4480]97from anuga.abstract_2d_finite_volumes.util import get_revision_number, \
[4567]98     remove_lone_verts, sww2timeseries, get_centroid_values
[4497]99from anuga.load_mesh.loadASCII import export_mesh_file
[5226]100from anuga.utilities.polygon import intersection
101
102
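# Illustrative sketch only (not part of the original module): the typical
# dataflow described in the module docstring, using functions defined later
# in this file. The file names 'topo' and 'run' are placeholders.
def _example_dataflow_sketch():
    """Illustrative only."""
    dem2pts('topo', verbose=True)             # topo.dem -> topo.pts
    export_grid('run', quantities=['stage'],  # run.sww  -> run_stage.asc
                cellsize=25, format='asc')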
[4462]103# formula mappings
104
105quantity_formula = {'momentum':'(xmomentum**2 + ymomentum**2)**0.5',
106                    'depth':'stage-elevation',
107                    'speed': \
108 '(xmomentum**2 + ymomentum**2)**0.5/(stage-elevation+1.e-6/(stage-elevation))'}
109
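# Illustrative sketch only (assumption, not part of the original module):
# one way a formula string from quantity_formula could be evaluated, given
# Numeric arrays for the basic quantities.
def _example_evaluate_formula(name, stage, elevation, xmomentum, ymomentum):
    """Illustrative only - evaluate quantity_formula[name] with eval()."""
    namespace = {'stage': stage, 'elevation': elevation,
                 'xmomentum': xmomentum, 'ymomentum': ymomentum}
    return eval(quantity_formula[name], namespace)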
110
[4271]111   
[2852]112def make_filename(s):
[4612]113    """Transform argument string into a suitable filename
[2852]114    """
115
116    s = s.strip()
117    s = s.replace(' ', '_')
118    s = s.replace('(', '')
119    s = s.replace(')', '')
120    s = s.replace('__', '_')
121
122    return s
123
124
125def check_dir(path, verbose=None):
126    """Check that specified path exists.
127    If path does not exist, it will be created if possible.
128
129    USAGE:
130       check_dir(path, verbose)
131
132    ARGUMENTS:
133        path -- Directory
134        verbose -- Flag verbose output (default: None)
135
136    RETURN VALUE:
137        Verified path including trailing separator
138
139    """
140
141    import os.path
142
143    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
144        unix = 0
145    else:
146        unix = 1
147
148
149    if path[-1] != os.sep:
150        path = path + os.sep  # Add separator for directories
151
152    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
153    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
154        try:
155            exitcode=os.mkdir(path)
156
157            # Change access rights if possible
158            #
159            if unix:
160                exitcode=os.system('chmod 775 '+path)
161            else:
162                pass  # FIXME: What about access rights under Windows?
163
164            if verbose: print 'MESSAGE: Directory', path, 'created.'
165
166        except:
167            print 'WARNING: Directory', path, 'could not be created.'
168            if unix:
169                path = '/tmp/'
170            else:
171                path = 'C:'
172
173            print 'Using directory %s instead' %path
174
175    return(path)
176
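# Illustrative usage sketch only (the directory name is a placeholder):
# make sure an output directory exists and get it back with a trailing
# separator before writing into it.
def _example_check_dir_usage():
    """Illustrative only."""
    datadir = check_dir('~/anuga_output', verbose=True)
    open(datadir + 'log.txt', 'w').write('started\n')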
177
178
179def del_dir(path):
180    """Recursively delete directory path and all its contents
181    """
182
183    import os
184
185    if os.path.isdir(path):
186        for file in os.listdir(path):
187            X = os.path.join(path, file)
188
189
190            if os.path.isdir(X) and not os.path.islink(X):
191                del_dir(X)
192            else:
193                try:
194                    os.remove(X)
195                except:
196                    print "Could not remove file %s" %X
197
198        os.rmdir(path)
[4598]199       
200       
201# ANOTHER OPTION, IF NEEDED IN THE FUTURE, Nick B 7/2007
[4603]202def rmgeneric(path, __func__,verbose=False):
203    ERROR_STR= """Error removing %(path)s, %(error)s """
[2852]204
[4603]205    try:
206        __func__(path)
207        if verbose: print 'Removed ', path
208    except OSError, (errno, strerror):
209        print ERROR_STR % {'path' : path, 'error': strerror }
210           
211def removeall(path,verbose=False):
[2852]212
[4603]213    if not os.path.isdir(path):
214        return
215   
216    files=os.listdir(path)
[2852]217
[4603]218    for x in files:
219        fullpath=os.path.join(path, x)
220        if os.path.isfile(fullpath):
221            f=os.remove
222            rmgeneric(fullpath, f)
223        elif os.path.isdir(fullpath):
224            removeall(fullpath)
225            f=os.rmdir
226            rmgeneric(fullpath, f,verbose)
227
228
229
[2852]230def create_filename(datadir, filename, format, size=None, time=None):
231
232    import os
[3514]233    #from anuga.config import data_dir
[2852]234
235    FN = check_dir(datadir) + filename
236
237    if size is not None:
238        FN += '_size%d' %size
239
240    if time is not None:
241        FN += '_time%.2f' %time
242
243    FN += '.' + format
244    return FN
245
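# Illustrative sketch only (arguments are placeholders): with these inputs
# create_filename returns something like 'output/run1_size4_time3.50.sww',
# check_dir supplying the trailing separator.
def _example_create_filename():
    """Illustrative only."""
    return create_filename('output', 'run1', 'sww', size=4, time=3.5)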
246
247def get_files(datadir, filename, format, size):
[4595]248    """Get all file names with given name, size and format
[2852]249    """
250
251    import glob
252
253    import os
[3514]254    #from anuga.config import data_dir
[2852]255
256    dir = check_dir(datadir)
257
258    pattern = dir + os.sep + filename + '_size=%d*.%s' %(size, format)
259    return glob.glob(pattern)
260
261
262
263#Generic class for storing output to e.g. visualisation or checkpointing
264class Data_format:
265    """Generic interface to data formats
266    """
267
268
269    def __init__(self, domain, extension, mode = 'w'):
270        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
271                                        '''   'w' (write)'''+\
272                                        '''   'r' (read)''' +\
273                                        '''   'a' (append)'''
274
275        #Create filename
276        self.filename = create_filename(domain.get_datadir(),
[3928]277                                        domain.get_name(), extension)
[2852]278
279        #print 'F', self.filename
280        self.timestep = 0
281        self.domain = domain
[3928]282       
[2852]283
284
[3928]285        # Exclude ghosts in case this is a parallel domain
286        self.number_of_nodes = domain.number_of_full_nodes       
287        self.number_of_volumes = domain.number_of_full_triangles
288        #self.number_of_volumes = len(domain)       
289
290
291
292
[2852]293        #FIXME: Should we have a general set_precision function?
294
295
296
297#Class for storing output to e.g. visualisation
298class Data_format_sww(Data_format):
299    """Interface to native NetCDF format (.sww) for storing model output
300
301    There are two kinds of data
302
303    1: Constant data: Vertex coordinates and field values. Stored once
304    2: Variable data: Conserved quantities. Stored once per timestep.
305
306    All data is assumed to reside at vertex locations.
307    """
308
309
310    def __init__(self, domain, mode = 'w',\
311                 max_size = 2000000000,
312                 recursion = False):
313        from Scientific.IO.NetCDF import NetCDFFile
314        from Numeric import Int, Float, Float32
315
316        self.precision = Float32 #Use single precision for quantities
317        if hasattr(domain, 'max_size'):
318            self.max_size = domain.max_size #file size max is 2Gig
319        else:
320            self.max_size = max_size
321        self.recursion = recursion
322        self.mode = mode
323
324        Data_format.__init__(self, domain, 'sww', mode)
325
[3642]326        if hasattr(domain, 'minimum_storable_height'):
[4704]327            self.minimum_storable_height = domain.minimum_storable_height
[3529]328        else:
[3642]329            self.minimum_storable_height = default_minimum_storable_height
[2852]330
331        # NetCDF file definition
332        fid = NetCDFFile(self.filename, mode)
333
334        if mode == 'w':
[4455]335            description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'
336            self.writer = Write_sww()
[4704]337            self.writer.store_header(fid,
338                                     domain.starttime,
339                                     self.number_of_volumes,
340                                     self.domain.number_of_full_nodes,
341                                     description=description,
342                                     smoothing=domain.smooth,
[4862]343                                     order=domain.default_order,
344                                     sww_precision=self.precision)
[4704]345
346            # Extra optional information
[4455]347            if hasattr(domain, 'texture'):
[4704]348                fid.texture = domain.texture
[2852]349
[4704]350            if domain.quantities_to_be_monitored is not None:
351                fid.createDimension('singleton', 1)
[4705]352                fid.createDimension('two', 2)               
353
354                poly = domain.monitor_polygon
355                if poly is not None:
356                    N = len(poly)
357                    fid.createDimension('polygon_length', N)
[4706]358                    fid.createVariable('extrema.polygon',
[4705]359                                       self.precision,
360                                       ('polygon_length',
361                                        'two'))
[4706]362                    fid.variables['extrema.polygon'][:] = poly                                   
363
[4705]364                   
365                interval = domain.monitor_time_interval
366                if interval is not None:
[4706]367                    fid.createVariable('extrema.time_interval',
[4705]368                                       self.precision,
369                                       ('two',))
[4706]370                    fid.variables['extrema.time_interval'][:] = interval
[4705]371
372               
[4704]373                for q in domain.quantities_to_be_monitored:
374                    #print 'doing', q
[4706]375                    fid.createVariable(q+'.extrema', self.precision,
[4704]376                                       ('numbers_in_range',))
[4706]377                    fid.createVariable(q+'.min_location', self.precision,
[4704]378                                       ('numbers_in_range',))
[4706]379                    fid.createVariable(q+'.max_location', self.precision,
[4704]380                                       ('numbers_in_range',))
[4706]381                    fid.createVariable(q+'.min_time', self.precision,
[4704]382                                       ('singleton',))
[4706]383                    fid.createVariable(q+'.max_time', self.precision,
[4704]384                                       ('singleton',))
[2852]385
[4704]386                   
[2852]387        fid.close()
388
389
390    def store_connectivity(self):
391        """Specialisation of store_connectivity for net CDF format
392
393        Writes x,y,z coordinates of triangles constituting
394        the bed elevation.
395        """
396
397        from Scientific.IO.NetCDF import NetCDFFile
398
399        from Numeric import concatenate, Int
400
401        domain = self.domain
402
403        #Get NetCDF
404        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
405
406        # Get the variables
407        x = fid.variables['x']
408        y = fid.variables['y']
409        z = fid.variables['elevation']
410
411        volumes = fid.variables['volumes']
412
413        # Get X, Y and bed elevation Z
414        Q = domain.quantities['elevation']
415        X,Y,Z,V = Q.get_vertex_values(xy=True,
[3926]416                                      precision=self.precision)
[2852]417
[4455]418        #
419        points = concatenate( (X[:,NewAxis],Y[:,NewAxis]), axis=1 )
[4704]420        self.writer.store_triangulation(fid,
421                                        points,
422                                        V.astype(volumes.typecode()),
423                                        Z,
424                                        points_georeference= \
425                                        domain.geo_reference)
[2852]426
[4704]427        # Close
428        fid.close()
[2852]429
430
[4868]431    def store_timestep(self, names=None):
[2852]432        """Store time and named quantities to file
433        """
[4868]434       
[2852]435        from Scientific.IO.NetCDF import NetCDFFile
436        import types
437        from time import sleep
438        from os import stat
439
440        from Numeric import choose
[4868]441
442
443        if names is None:
444            # Standard shallow water wave equation quantities in ANUGA
445            names = ['stage', 'xmomentum', 'ymomentum']
[4455]446       
[4704]447        # Get NetCDF       
[2852]448        retries = 0
449        file_open = False
450        while not file_open and retries < 10:
451            try:
[4704]452                fid = NetCDFFile(self.filename, 'a') # Open existing file
[2852]453            except IOError:
[4704]454                # This could happen if someone was reading the file.
455                # In that case, wait a while and try again
[2852]456                msg = 'Warning (store_timestep): File %s could not be opened'\
457                      %self.filename
458                msg += ' - trying step %s again' %self.domain.time
459                print msg
460                retries += 1
461                sleep(1)
462            else:
463                file_open = True
464
465        if not file_open:
466            msg = 'File %s could not be opened for append' %self.filename
467            raise DataFileNotOpenError, msg
468
469
470
[4704]471        # Check to see if the file is already too big:
[2852]472        time = fid.variables['time']
473        i = len(time)+1
474        file_size = stat(self.filename)[6]
475        file_size_increase =  file_size/i
476        if file_size + file_size_increase > self.max_size*(2**self.recursion):
[4704]477            # In order to get the file name and start time correct,
478            # I change the domain.filename and domain.starttime.
479            # This is the only way to do this without changing
480            # other modules (I think).
[2852]481
[4704]482            # Write a filename addon that won't break swollens reader
483            # (10.sww is bad)
[2852]484            filename_ext = '_time_%s'%self.domain.time
485            filename_ext = filename_ext.replace('.', '_')
[4704]486           
487            # Remember the old filename, then give domain a
488            # name with the extension
[3846]489            old_domain_filename = self.domain.get_name()
[2852]490            if not self.recursion:
[3846]491                self.domain.set_name(old_domain_filename+filename_ext)
[2852]492
[3846]493
[4704]494            # Change the domain starttime to the current time
[2852]495            old_domain_starttime = self.domain.starttime
496            self.domain.starttime = self.domain.time
497
[4704]498            # Build a new data_structure.
[2852]499            next_data_structure=\
500                Data_format_sww(self.domain, mode=self.mode,\
501                                max_size = self.max_size,\
502                                recursion = self.recursion+1)
503            if not self.recursion:
504                print '    file_size = %s'%file_size
505                print '    saving file to %s'%next_data_structure.filename
506            #set up the new data_structure
507            self.domain.writer = next_data_structure
508
509            #FIXME - could be cleaner to use domain.store_timestep etc.
510            next_data_structure.store_connectivity()
511            next_data_structure.store_timestep(names)
512            fid.sync()
513            fid.close()
514
515            #restore the old starttime and filename
516            self.domain.starttime = old_domain_starttime
[3846]517            self.domain.set_name(old_domain_filename)           
[2852]518        else:
519            self.recursion = False
520            domain = self.domain
521
522            # Get the variables
523            time = fid.variables['time']
524            stage = fid.variables['stage']
525            xmomentum = fid.variables['xmomentum']
526            ymomentum = fid.variables['ymomentum']
527            i = len(time)
528            if type(names) not in [types.ListType, types.TupleType]:
529                names = [names]
530
[4455]531            if 'stage' in names and 'xmomentum' in names and \
[4704]532               'ymomentum' in names:
[4455]533
[4868]534                # Get stage, elevation, depth and select only those
535                # values where minimum_storable_height is exceeded
[4455]536                Q = domain.quantities['stage']
[4868]537                A, _ = Q.get_vertex_values(xy = False,
538                                           precision = self.precision)
[4455]539                z = fid.variables['elevation']
[4868]540
541                storable_indices = A-z[:] >= self.minimum_storable_height
542                stage = choose(storable_indices, (z[:], A))
[4455]543               
[4868]544                # Define a zero vector of same size and type as A
545                # for use with momenta
546                null = zeros(size(A), A.typecode())
547               
548                # Get xmomentum where depth exceeds minimum_storable_height
[4455]549                Q = domain.quantities['xmomentum']
[4868]550                xmom, _ = Q.get_vertex_values(xy = False,
551                                              precision = self.precision)
552                xmomentum = choose(storable_indices, (null, xmom))
[4455]553               
[4868]554
555                # Get ymomentum where depth exceeds minimum_storable_height
[4455]556                Q = domain.quantities['ymomentum']
[4868]557                ymom, _ = Q.get_vertex_values(xy = False,
558                                              precision = self.precision)
559                ymomentum = choose(storable_indices, (null, ymom))               
[4704]560               
561                # Write quantities to NetCDF
562                self.writer.store_quantities(fid, 
563                                             time=self.domain.time,
[4862]564                                             sww_precision=self.precision,
[4704]565                                             stage=stage,
566                                             xmomentum=xmomentum,
567                                             ymomentum=ymomentum)
[4455]568            else:
[4868]569                msg = 'Quantities stored must be: stage, xmomentum, ymomentum.'
570                msg += ' Instead I got: ' + str(names)
571                raise Exception, msg
572           
[2852]573
[3946]574
[4704]575            # Update extrema if requested
576            domain = self.domain
577            if domain.quantities_to_be_monitored is not None:
578                for q, info in domain.quantities_to_be_monitored.items():
579
580                    if info['min'] is not None:
[4706]581                        fid.variables[q + '.extrema'][0] = info['min']
582                        fid.variables[q + '.min_location'][:] =\
[4704]583                                        info['min_location']
[4706]584                        fid.variables[q + '.min_time'][0] = info['min_time']
[4704]585                       
586                    if info['max'] is not None:
[4706]587                        fid.variables[q + '.extrema'][1] = info['max']
588                        fid.variables[q + '.max_location'][:] =\
[4704]589                                        info['max_location']
[4706]590                        fid.variables[q + '.max_time'][0] = info['max_time']
[4704]591
592           
593
[4868]594            # Flush and close
[2852]595            fid.sync()
596            fid.close()
597
598
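# Illustrative sketch only (assumption): how a Domain might drive this
# writer by hand, assuming the usual Domain.evolve(yieldstep, finaltime)
# interface. Normally the domain manages its writer itself.
def _example_sww_writer_usage(domain):
    """Illustrative only."""
    writer = get_dataobject(domain, mode='w')   # domain.format == 'sww'
    writer.store_connectivity()                 # static mesh and elevation
    for t in domain.evolve(yieldstep=1, finaltime=10):
        writer.store_timestep(['stage', 'xmomentum', 'ymomentum'])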
599
[4704]600# Class for handling checkpoints data
[2852]601class Data_format_cpt(Data_format):
602    """Interface to native NetCDF format (.cpt)
603    """
604
605
606    def __init__(self, domain, mode = 'w'):
607        from Scientific.IO.NetCDF import NetCDFFile
608        from Numeric import Int, Float
609
610        self.precision = Float #Use full precision
611
612        Data_format.__init__(self, domain, 'sww', mode)
613
614
615        # NetCDF file definition
616        fid = NetCDFFile(self.filename, mode)
617
618        if mode == 'w':
619            #Create new file
620            fid.institution = 'Geoscience Australia'
621            fid.description = 'Checkpoint data'
622            #fid.smooth = domain.smooth
623            fid.order = domain.default_order
624
625            # dimension definitions
626            fid.createDimension('number_of_volumes', self.number_of_volumes)
627            fid.createDimension('number_of_vertices', 3)
628
629            #Store info at all vertices (no smoothing)
630            fid.createDimension('number_of_points', 3*self.number_of_volumes)
631            fid.createDimension('number_of_timesteps', None) #extensible
632
633            # variable definitions
634
635            #Mesh
636            fid.createVariable('x', self.precision, ('number_of_points',))
637            fid.createVariable('y', self.precision, ('number_of_points',))
638
639
640            fid.createVariable('volumes', Int, ('number_of_volumes',
641                                                'number_of_vertices'))
642
643            fid.createVariable('time', self.precision,
644                               ('number_of_timesteps',))
645
646            #Allocate space for all quantities
647            for name in domain.quantities.keys():
648                fid.createVariable(name, self.precision,
649                                   ('number_of_timesteps',
650                                    'number_of_points'))
651
652        #Close
653        fid.close()
654
655
656    def store_checkpoint(self):
657        """
658        Write x,y coordinates of triangles and their connectivity (volumes),
659        together with the bed elevation.
662        """
663
664        from Scientific.IO.NetCDF import NetCDFFile
665
666        from Numeric import concatenate
667
668        domain = self.domain
669
670        #Get NetCDF
671        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
672
673        # Get the variables
674        x = fid.variables['x']
675        y = fid.variables['y']
676
677        volumes = fid.variables['volumes']
678
679        # Get X, Y and bed elevation Z
680        Q = domain.quantities['elevation']
681        X,Y,Z,V = Q.get_vertex_values(xy=True,
682                      precision = self.precision)
683
684
685
686        x[:] = X.astype(self.precision)
687        y[:] = Y.astype(self.precision)
688        z[:] = Z.astype(self.precision)
689
690        volumes[:] = V
691
692        #Close
693        fid.close()
694
695
696    def store_timestep(self, name):
697        """Store time and named quantity to file
698        """
699        from Scientific.IO.NetCDF import NetCDFFile
700        from time import sleep
701
702        #Get NetCDF
703        retries = 0
704        file_open = False
705        while not file_open and retries < 10:
706            try:
707                fid = NetCDFFile(self.filename, 'a') #Open existing file
708            except IOError:
709                #This could happen if someone was reading the file.
710                #In that case, wait a while and try again
711                msg = 'Warning (store_timestep): File %s could not be opened'\
712                  %self.filename
713                msg += ' - trying again'
714                print msg
715                retries += 1
716                sleep(1)
717            else:
718                file_open = True
719
720        if not file_open:
721            msg = 'File %s could not be opened for append' %self.filename
722            raise DataFileNotOpenError, msg
723
724
725        domain = self.domain
726
727        # Get the variables
728        time = fid.variables['time']
729        stage = fid.variables['stage']
730        i = len(time)
731
732        #Store stage
733        time[i] = self.domain.time
734
735        # Get quantity
736        Q = domain.quantities[name]
737        A,V = Q.get_vertex_values(xy=False,
[4704]738                                  precision = self.precision)
[2852]739
740        stage[i,:] = A.astype(self.precision)
741
742        #Flush and close
743        fid.sync()
744        fid.close()
745
746
[4326]747#### NED is national exposure database (name changed to NEXIS)
[3292]748   
749LAT_TITLE = 'LATITUDE'
750LONG_TITLE = 'LONGITUDE'
[3336]751X_TITLE = 'x'
752Y_TITLE = 'y'
[3292]753class Exposure_csv:
754    def __init__(self,file_name, latitude_title=LAT_TITLE,
[3398]755                 longitude_title=LONG_TITLE, is_x_y_locations=None,
[3336]756                 x_title=X_TITLE, y_title=Y_TITLE,
[4326]757                 refine_polygon=None, title_check_list=None):
[3292]758        """
[3296]759        This class is for handling the exposure csv file.
760        It reads the file in and converts the lats and longs to a geospatial
761        data object.
762        Use the methods to read and write columns.
763
764        The format of the csv files it reads is:
765           The first row is a title row.
766           commas are the delimiters
767           each column is a 'set' of data
768
769        Feel free to use/expand it to read other csv files.
770           
771           
772        It is not for adding and deleting rows
773       
[3292]774        Can geospatial handle string attributes? It's not made for them.
775        Currently it can't load and save string att's.
776
777        So just use geospatial to hold the x, y and georef? Bad, since
778        different att's are in different structures.  Not so bad, the info
779        to write if the .csv file is saved is in attribute_dic
780
781        The location info is in the geospatial attribute.
782       
783       
784        """
785        self._file_name = file_name
786        self._geospatial = None #
787
788        # self._attribute_dic is a dictionary.
789        #The keys are the column titles.
790        #The values are lists of column data
791       
792        # self._title_index_dic is a dictionary.
793        #The keys are the column titles.
794        #The values are the index positions of file columns.
795        self._attribute_dic, self._title_index_dic = \
[4612]796            csv2dict(self._file_name, title_check_list=title_check_list)
[3292]797        try:
[4059]798            #Have code here that handles caps or lower
[3292]799            lats = self._attribute_dic[latitude_title]
800            longs = self._attribute_dic[longitude_title]
801           
802        except KeyError:
803            # maybe a warning..
[3398]804            #Let's see if this works..
805            if False != is_x_y_locations:
806                is_x_y_locations = True
[3292]807            pass
808        else:
809            self._geospatial = Geospatial_data(latitudes = lats,
810                 longitudes = longs)
[3336]811
[3398]812        if is_x_y_locations is True:
[3336]813            if self._geospatial is not None:
814                pass #fixme throw an error
815            try:
816                xs = self._attribute_dic[x_title]
817                ys = self._attribute_dic[y_title]
818                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
819            except KeyError:
820                # maybe a warning..
[3664]821                msg = "Could not find location information."
[3336]822                raise TitleValueError, msg
823            else:
824                self._geospatial = Geospatial_data(data_points=points)
[3292]825           
826        # create a list of points that are in the refining_polygon
827        # described by a list of indexes representing the points
828
829    def __cmp__(self, other):
830        #print "self._attribute_dic",self._attribute_dic
831        #print "other._attribute_dic",other._attribute_dic
832        #print "self._title_index_dic", self._title_index_dic
833        #print "other._title_index_dic", other._title_index_dic
834       
835        #check that a is an instance of this class
836        if isinstance(self, type(other)):
837            result = cmp(self._attribute_dic, other._attribute_dic)
838            if result <>0:
839                return result
840            # The order of the columns is important. Therefore..
841            result = cmp(self._title_index_dic, other._title_index_dic)
842            if result <>0:
843                return result
844            for self_ls, other_ls in map(None,self._attribute_dic, \
845                    other._attribute_dic):
846                result = cmp(self._attribute_dic[self_ls],
847                             other._attribute_dic[other_ls])
848                if result <>0:
849                    return result
850            return 0
851        else:
852            return 1
853   
854
855    def get_column(self, column_name, use_refind_polygon=False):
856        """
857        Given a column name return a list of the column values
858
859        Note, the type of the values will be String!
[3437]860        do this to change a list of strings to a list of floats
861        time = [float(x) for x in time]
[3292]862       
863        Not implemented:
864        if use_refind_polygon is True, only return values in the
865        refined polygon
866        """
867        if not self._attribute_dic.has_key(column_name):
868            msg = 'There is no column called %s!' %column_name
869            raise TitleValueError, msg
870        return self._attribute_dic[column_name]
871
[3437]872
873    def get_value(self, value_column_name,
874                  known_column_name,
875                  known_values,
876                  use_refind_polygon=False):
877        """
878        Do linear interpolation on the known_column_name column, using the
879        known_values, to return a value of the value_column_name column.
880        """
881        pass
882
883
[3292]884    def get_location(self, use_refind_polygon=False):
885        """
886        Return a geospatial object which describes the
887        locations of the location file.
888
889        Note, if there is not location info, this returns None.
890       
891        Not implemented:
892        if use_refind_polygon is True, only return values in the
893        refined polygon
894        """
895        return self._geospatial
896
897    def set_column(self, column_name, column_values, overwrite=False):
898        """
899        Add a column to the 'end' (with the rightmost column being the end)
900        of the csv file.
901
902        Set overwrite to True if you want to overwrite a column.
903       
904        Note, in column_name white space is removed and case is not checked.
905        Precondition:
906        The column_name and column_values cannot have commas in them.
907        """
908       
909        value_row_count = \
910            len(self._attribute_dic[self._title_index_dic.keys()[0]])
911        if len(column_values) <> value_row_count: 
912            msg = 'The number of column values must equal the number of rows.'
913            raise DataMissingValuesError, msg
914       
915        if self._attribute_dic.has_key(column_name):
916            if not overwrite:
917                msg = 'Column name %s already in use!' %column_name
918                raise TitleValueError, msg
919        else:
920            # New title.  Add it to the title index.
921            self._title_index_dic[column_name] = len(self._title_index_dic)
922        self._attribute_dic[column_name] = column_values
923        #print "self._title_index_dic[column_name]",self._title_index_dic
924
925    def save(self, file_name=None):
926        """
927        Save the exposure csv file
928        """
929        if file_name is None:
930            file_name = self._file_name
931       
932        fd = open(file_name,'wb')
933        writer = csv.writer(fd)
934       
935        #Write the title to a csv file
936        line = [None]* len(self._title_index_dic)
937        for title in self._title_index_dic.iterkeys():
938            line[self._title_index_dic[title]]= title
939        writer.writerow(line)
940       
941        # Write the values to a csv file
942        value_row_count = \
943            len(self._attribute_dic[self._title_index_dic.keys()[0]])
944        for row_i in range(value_row_count):
945            line = [None]* len(self._title_index_dic)
946            for title in self._title_index_dic.iterkeys():
947                line[self._title_index_dic[title]]= \
948                     self._attribute_dic[title][row_i]
949            writer.writerow(line)
950
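# Illustrative usage sketch only: file name and column titles below are
# placeholders, not part of any real dataset.
def _example_exposure_csv_usage():
    """Illustrative only - read a column, add a derived one, save a copy."""
    exposure = Exposure_csv('exposure.csv')
    depths = [float(x) for x in exposure.get_column('INUNDATION_DEPTH')]
    exposure.set_column('INUNDATION_DEPTH_CM', [100*d for d in depths])
    exposure.save(file_name='exposure_updated.csv')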
951
[4612]952def csv2dict(file_name, title_check_list=None):
953    """
954    Load in the csv as a dic, title as key and column info as value.
955    Also, create a dic, title as key and column index as value,
956    to keep track of the column order.
[4775]957
958    Two dictionaries are returned.
[4612]959   
960    WARNING: Values are returned as strings.
961    do this to change a list of strings to a list of floats
962        time = [float(x) for x in time]
[4775]963
964       
[4612]965    """
966   
967    #
968    attribute_dic = {}
969    title_index_dic = {}
970    titles_stripped = [] # list of titles
971    reader = csv.reader(file(file_name))
972
973    # Read in and manipulate the title info
974    titles = reader.next()
975    for i,title in enumerate(titles):
976        titles_stripped.append(title.strip())
977        title_index_dic[title.strip()] = i
978    title_count = len(titles_stripped)       
979    #print "title_index_dic",title_index_dic
980    if title_check_list is not None:
981        for title_check in title_check_list:
982            #msg = "Reading error.  This row is not present ", title_check
983            #assert title_index_dic.has_key(title_check), msg
984            if not title_index_dic.has_key(title_check):
985                #reader.close()
986                msg = "Reading error.  This column title is not present ", \
987                      title_check                     
988                raise IOError, msg
989               
990   
991   
992    #create a dic of column values, indexed by column title
993    for line in reader:
994        if len(line) <> title_count:
995            raise IOError #FIXME make this nicer
996        for i, value in enumerate(line):
997            attribute_dic.setdefault(titles_stripped[i],[]).append(value)
998       
999    return attribute_dic, title_index_dic
1000
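# Illustrative usage sketch only: 'gauge.csv' and the 'time' column are
# placeholders.
def _example_csv2dict_usage():
    """Illustrative only."""
    attributes, title_index = csv2dict('gauge.csv', title_check_list=['time'])
    time = [float(x) for x in attributes['time']]  # values come back as strings
    return time, title_index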
1001
[2852]1002#Auxiliary
1003def write_obj(filename,x,y,z):
1004    """Store x,y,z vectors into filename (obj format)
1005       Vectors are assumed to have dimension (M,3) where
1006       M corresponds to the number of elements.
1007       triangles are assumed to be disconnected
1008
1009       The three numbers in each vector correspond to three vertices,
1010
1011       e.g. the x coordinate of vertex 1 of element i is in x[i,1]
1012
1013    """
1014    #print 'Writing obj to %s' % filename
1015
1016    import os.path
1017
1018    root, ext = os.path.splitext(filename)
1019    if ext == '.obj':
1020        FN = filename
1021    else:
1022        FN = filename + '.obj'
1023
1024
1025    outfile = open(FN, 'wb')
1026    outfile.write("# Triangulation as an obj file\n")
1027
1028    M, N = x.shape
1029    assert N==3  #Assuming three vertices per element
1030
1031    for i in range(M):
1032        for j in range(N):
1033            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))
1034
1035    for i in range(M):
1036        base = i*N
1037        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))
1038
1039    outfile.close()
1040
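# Illustrative sketch only: write a single disconnected triangle with
# write_obj. Array shapes are (M, 3) as documented above.
def _example_write_obj_usage():
    """Illustrative only - creates one_triangle.obj."""
    from Numeric import array, Float
    x = array([[0.0, 1.0, 0.0]], Float)
    y = array([[0.0, 0.0, 1.0]], Float)
    z = array([[0.0, 0.0, 0.0]], Float)
    write_obj('one_triangle', x, y, z)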
1041
1042#########################################################
1043#Conversion routines
1044########################################################
1045
1046def sww2obj(basefilename, size):
1047    """Convert netcdf based data output to obj
1048    """
1049    from Scientific.IO.NetCDF import NetCDFFile
1050
1051    from Numeric import Float, zeros
1052
1053    #Get NetCDF
1054    FN = create_filename('.', basefilename, 'sww', size)
1055    print 'Reading from ', FN
1056    fid = NetCDFFile(FN, 'r')  #Open existing file for read
1057
1058
1059    # Get the variables
1060    x = fid.variables['x']
1061    y = fid.variables['y']
1062    z = fid.variables['elevation']
1063    time = fid.variables['time']
1064    stage = fid.variables['stage']
1065
1066    M = size  #Number of lines
1067    xx = zeros((M,3), Float)
1068    yy = zeros((M,3), Float)
1069    zz = zeros((M,3), Float)
1070
1071    for i in range(M):
1072        for j in range(3):
1073            xx[i,j] = x[i+j*M]
1074            yy[i,j] = y[i+j*M]
1075            zz[i,j] = z[i+j*M]
1076
1077    #Write obj for bathymetry
1078    FN = create_filename('.', basefilename, 'obj', size)
1079    write_obj(FN,xx,yy,zz)
1080
1081
1082    #Now read all the data with variable information, combine with
1083    #x,y info and store as obj
1084
1085    for k in range(len(time)):
1086        t = time[k]
1087        print 'Processing timestep %f' %t
1088
1089        for i in range(M):
1090            for j in range(3):
1091                zz[i,j] = stage[k,i+j*M]
1092
1093
1094        #Write obj for variable data
1095        #FN = create_filename(basefilename, 'obj', size, time=t)
1096        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
1097        write_obj(FN,xx,yy,zz)
1098
1099
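# Illustrative usage sketch only: 'runup' and the triangle count are
# placeholders; size must match the number of triangles in the sww file,
# which here would be ./runup_size400.sww.
def _example_sww2obj_usage():
    """Illustrative only."""
    sww2obj('runup', 400)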
1100def dat2obj(basefilename):
1101    """Convert line based data output to obj
1102    FIXME: Obsolete?
1103    """
1104
1105    import glob, os
[3514]1106    from anuga.config import data_dir
[2852]1107
1108
1109    #Get bathymetry and x,y's
1110    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()
1111
1112    from Numeric import zeros, Float
1113
1114    M = len(lines)  #Number of lines
1115    x = zeros((M,3), Float)
1116    y = zeros((M,3), Float)
1117    z = zeros((M,3), Float)
1118
1119    ##i = 0
1120    for i, line in enumerate(lines):
1121        tokens = line.split()
1122        values = map(float,tokens)
1123
1124        for j in range(3):
1125            x[i,j] = values[j*3]
1126            y[i,j] = values[j*3+1]
1127            z[i,j] = values[j*3+2]
1128
1129        ##i += 1
1130
1131
1132    #Write obj for bathymetry
1133    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)
1134
1135
1136    #Now read all the data files with variable information, combine with
1137    #x,y info
1138    #and store as obj
1139
1140    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')
1141
1142    for filename in files:
1143        print 'Processing %s' % filename
1144
1145        lines = open(data_dir+os.sep+filename,'r').readlines()
1146        assert len(lines) == M
1147        root, ext = os.path.splitext(filename)
1148
1149        #Get time from filename
1150        i0 = filename.find('_time=')
1151        if i0 == -1:
1152            #Skip bathymetry file
1153            continue
1154
1155        i0 += 6  #Position where time starts
1156        i1 = filename.find('.dat')
1157
1158        if i1 > i0:
1159            t = float(filename[i0:i1])
1160        else:
1161            raise DataTimeError, 'Hmmmm'
1162
1163
1164
1165        ##i = 0
1166        for i, line in enumerate(lines):
1167            tokens = line.split()
1168            values = map(float,tokens)
1169
1170            for j in range(3):
1171                z[i,j] = values[j]
1172
1173            ##i += 1
1174
1175        #Write obj for variable data
1176        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
1177
1178
1179def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
1180    """Read netcdf filename1, pick timesteps first:step:last and save to
1181    netcdf file filename2
1182    """
1183    from Scientific.IO.NetCDF import NetCDFFile
1184
1185    #Get NetCDF
1186    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
1187    outfile = NetCDFFile(filename2, 'w')  #Open new file
1188
1189
1190    #Copy dimensions
1191    for d in infile.dimensions:
1192        outfile.createDimension(d, infile.dimensions[d])
1193
1194    for name in infile.variables:
1195        var = infile.variables[name]
1196        outfile.createVariable(name, var.typecode(), var.dimensions)
1197
1198
1199    #Copy the static variables
1200    for name in infile.variables:
1201        if name == 'time' or name == 'stage':
1202            pass
1203        else:
1204            #Copy
1205            outfile.variables[name][:] = infile.variables[name][:]
1206
1207    #Copy selected timesteps
1208    time = infile.variables['time']
1209    stage = infile.variables['stage']
1210
1211    newtime = outfile.variables['time']
1212    newstage = outfile.variables['stage']
1213
1214    if last is None:
1215        last = len(time)
1216
1217    selection = range(first, last, step)
1218    for i, j in enumerate(selection):
1219        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
1220        newtime[i] = time[j]
1221        newstage[i,:] = stage[j,:]
1222
1223    #Close
1224    infile.close()
1225    outfile.close()
1226
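# Illustrative usage sketch only (file names are placeholders): keep every
# fifth timestep of run.sww in a smaller copy.
def _example_filter_netcdf_usage():
    """Illustrative only."""
    filter_netcdf('run.sww', 'run_thinned.sww', first=0, step=5)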
1227
1228#Get data objects
1229def get_dataobject(domain, mode='w'):
1230    """Return instance of class of given format using filename
1231    """
1232
1233    cls = eval('Data_format_%s' %domain.format)
1234    return cls(domain, mode)
1235
1236
1237
1238
1239def dem2pts(basename_in, basename_out=None,
1240            easting_min=None, easting_max=None,
1241            northing_min=None, northing_max=None,
1242            use_cache=False, verbose=False,):
1243    """Read Digitial Elevation model from the following NetCDF format (.dem)
1244
1245    Example:
1246
1247    ncols         3121
1248    nrows         1800
1249    xllcorner     722000
1250    yllcorner     5893000
1251    cellsize      25
1252    NODATA_value  -9999
1253    138.3698 137.4194 136.5062 135.5558 ..........
1254
1255    Convert to NetCDF pts format which is
1256
1257    points:  (Nx2) Float array
1258    elevation: N Float array
1259    """
1260
1261
1262
1263    kwargs = {'basename_out': basename_out,
1264              'easting_min': easting_min,
1265              'easting_max': easting_max,
1266              'northing_min': northing_min,
1267              'northing_max': northing_max,
1268              'verbose': verbose}
1269
1270    if use_cache is True:
1271        from caching import cache
1272        result = cache(_dem2pts, basename_in, kwargs,
1273                       dependencies = [basename_in + '.dem'],
1274                       verbose = verbose)
1275
1276    else:
1277        result = apply(_dem2pts, [basename_in], kwargs)
1278
1279    return result
1280
1281
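# Illustrative usage sketch only: 'topo' stands for an existing topo.dem and
# the bounding box values are placeholders.
def _example_dem2pts_usage():
    """Illustrative only - clip and convert a NetCDF DEM to a pts file."""
    dem2pts('topo', basename_out='topo_clipped',
            easting_min=722000, easting_max=725000,
            northing_min=5893000, northing_max=5895000,
            use_cache=False, verbose=True)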
1282def _dem2pts(basename_in, basename_out=None, verbose=False,
1283            easting_min=None, easting_max=None,
1284            northing_min=None, northing_max=None):
1285    """Read Digitial Elevation model from the following NetCDF format (.dem)
1286
1287    Internal function. See public function dem2pts for details.
1288    """
1289
[4776]1290    # FIXME: Can this be written feasibly using write_pts?
[2852]1291
1292    import os
1293    from Scientific.IO.NetCDF import NetCDFFile
1294    from Numeric import Float, zeros, reshape, sum
1295
1296    root = basename_in
1297
[4776]1298    # Get NetCDF
1299    infile = NetCDFFile(root + '.dem', 'r')  # Open existing netcdf file for read
[2852]1300
1301    if verbose: print 'Reading DEM from %s' %(root + '.dem')
1302
1303    ncols = infile.ncols[0]
1304    nrows = infile.nrows[0]
[4776]1305    xllcorner = infile.xllcorner[0]  # Easting of lower left corner
1306    yllcorner = infile.yllcorner[0]  # Northing of lower left corner
[2852]1307    cellsize = infile.cellsize[0]
1308    NODATA_value = infile.NODATA_value[0]
1309    dem_elevation = infile.variables['elevation']
1310
1311    zone = infile.zone[0]
1312    false_easting = infile.false_easting[0]
1313    false_northing = infile.false_northing[0]
1314
[4776]1315    # Text strings
[2852]1316    projection = infile.projection
1317    datum = infile.datum
1318    units = infile.units
1319
1320
[4776]1321    # Get output file
[2852]1322    if basename_out == None:
1323        ptsname = root + '.pts'
1324    else:
1325        ptsname = basename_out + '.pts'
1326
1327    if verbose: print 'Store to NetCDF file %s' %ptsname
1328    # NetCDF file definition
1329    outfile = NetCDFFile(ptsname, 'w')
1330
[4776]1331    # Create new file
[2852]1332    outfile.institution = 'Geoscience Australia'
1333    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
1334                      'of spatial point data'
[4776]1335    # Assign default values
[2852]1336    if easting_min is None: easting_min = xllcorner
1337    if easting_max is None: easting_max = xllcorner + ncols*cellsize
1338    if northing_min is None: northing_min = yllcorner
1339    if northing_max is None: northing_max = yllcorner + nrows*cellsize
1340
[4776]1341    # Compute offsets to update georeferencing
[2852]1342    easting_offset = xllcorner - easting_min
1343    northing_offset = yllcorner - northing_min
1344
[4776]1345    # Georeferencing
[2852]1346    outfile.zone = zone
[4776]1347    outfile.xllcorner = easting_min # Easting of lower left corner
1348    outfile.yllcorner = northing_min # Northing of lower left corner
[2852]1349    outfile.false_easting = false_easting
1350    outfile.false_northing = false_northing
1351
1352    outfile.projection = projection
1353    outfile.datum = datum
1354    outfile.units = units
1355
1356
[4776]1357    # Grid info (FIXME: probably not going to be used, but heck)
[2852]1358    outfile.ncols = ncols
1359    outfile.nrows = nrows
1360
1361    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
1362    totalnopoints = nrows*ncols
1363
[4776]1364    # Calculating number of NODATA_values for each row in clipped region
1365    # FIXME: use array operations to do faster
[2852]1366    nn = 0
1367    k = 0
1368    i1_0 = 0
1369    j1_0 = 0
1370    thisj = 0
1371    thisi = 0
1372    for i in range(nrows):
1373        y = (nrows-i-1)*cellsize + yllcorner
1374        for j in range(ncols):
1375            x = j*cellsize + xllcorner
1376            if easting_min <= x <= easting_max and \
1377               northing_min <= y <= northing_max:
1378                thisj = j
1379                thisi = i
1380                if dem_elevation_r[i,j] == NODATA_value: nn += 1
1381
1382                if k == 0:
1383                    i1_0 = i
1384                    j1_0 = j
1385                k += 1
1386
1387    index1 = j1_0
1388    index2 = thisj
1389
[4776]1390    # Dimension definitions
[2852]1391    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
1392    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
1393
1394    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
1395    nopoints = clippednopoints-nn
1396
1397    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]
1398
[3664]1399    if verbose:
[2852]1400        print 'There are %d values in the elevation' %totalnopoints
1401        print 'There are %d values in the clipped elevation' %clippednopoints
1402        print 'There are %d NODATA_values in the clipped elevation' %nn
1403
1404    outfile.createDimension('number_of_points', nopoints)
1405    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1406
[4776]1407    # Variable definitions
[2852]1408    outfile.createVariable('points', Float, ('number_of_points',
1409                                             'number_of_dimensions'))
1410    outfile.createVariable('elevation', Float, ('number_of_points',))
1411
1412    # Get handles to the variables
1413    points = outfile.variables['points']
1414    elevation = outfile.variables['elevation']
1415
1416    lenv = index2-index1+1
[4776]1417    # Store data
[2852]1418    global_index = 0
[4776]1419    # for i in range(nrows):
[2852]1420    for i in range(i1_0,thisi+1,1):
1421        if verbose and i%((nrows+10)/10)==0:
1422            print 'Processing row %d of %d' %(i, nrows)
1423
1424        lower_index = global_index
1425
1426        v = dem_elevation_r[i,index1:index2+1]
1427        no_NODATA = sum(v == NODATA_value)
1428        if no_NODATA > 0:
[4776]1429            newcols = lenv - no_NODATA # ncols_in_bounding_box - no_NODATA
[2852]1430        else:
[4776]1431            newcols = lenv # ncols_in_bounding_box
[2852]1432
1433        telev = zeros(newcols, Float)
1434        tpoints = zeros((newcols, 2), Float)
1435
1436        local_index = 0
1437
1438        y = (nrows-i-1)*cellsize + yllcorner
1439        #for j in range(ncols):
1440        for j in range(j1_0,index2+1,1):
1441
1442            x = j*cellsize + xllcorner
1443            if easting_min <= x <= easting_max and \
1444               northing_min <= y <= northing_max and \
1445               dem_elevation_r[i,j] <> NODATA_value:
1446                tpoints[local_index, :] = [x-easting_min,y-northing_min]
1447                telev[local_index] = dem_elevation_r[i, j]
1448                global_index += 1
1449                local_index += 1
1450
1451        upper_index = global_index
1452
1453        if upper_index == lower_index + newcols:
1454            points[lower_index:upper_index, :] = tpoints
1455            elevation[lower_index:upper_index] = telev
1456
1457    assert global_index == nopoints, 'index not equal to number of points'
1458
1459    infile.close()
1460    outfile.close()
1461
1462
1463
1464def _read_hecras_cross_sections(lines):
1465    """Return block of surface lines for each cross section
1466    Starts with SURFACE LINE,
1467    Ends with END CROSS-SECTION
1468    """
1469
1470    points = []
1471
1472    reading_surface = False
1473    for i, line in enumerate(lines):
1474
1475        if len(line.strip()) == 0:    #Ignore blanks
1476            continue
1477
1478        if lines[i].strip().startswith('SURFACE LINE'):
1479            reading_surface = True
1480            continue
1481
1482        if lines[i].strip().startswith('END') and reading_surface:
1483            yield points
1484            reading_surface = False
1485            points = []
1486
1487        if reading_surface:
1488            fields = line.strip().split(',')
1489            easting = float(fields[0])
1490            northing = float(fields[1])
1491            elevation = float(fields[2])
1492            points.append([easting, northing, elevation])
1493
1494
1495
1496
1497def hecras_cross_sections2pts(basename_in,
1498                              basename_out=None,
1499                              verbose=False):
1500    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1501
1502    Example:
1503
1504
1505# RAS export file created on Mon 15Aug2005 11:42
1506# by HEC-RAS Version 3.1.1
1507
1508BEGIN HEADER:
1509  UNITS: METRIC
1510  DTM TYPE: TIN
1511  DTM: v:\1\cit\perth_topo\river_tin
1512  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1513  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1514  MAP PROJECTION: UTM
1515  PROJECTION ZONE: 50
1516  DATUM: AGD66
1517  VERTICAL DATUM:
1518  NUMBER OF REACHES:  19
1519  NUMBER OF CROSS-SECTIONS:  14206
1520END HEADER:
1521
1522
1523Only the SURFACE LINE data of the following form will be utilised
1524
1525  CROSS-SECTION:
1526    STREAM ID:Southern-Wungong
1527    REACH ID:Southern-Wungong
1528    STATION:19040.*
1529    CUT LINE:
1530      405548.671603161 , 6438142.7594925
1531      405734.536092045 , 6438326.10404912
1532      405745.130459356 , 6438331.48627354
1533      405813.89633823 , 6438368.6272789
1534    SURFACE LINE:
1535     405548.67,   6438142.76,   35.37
1536     405552.24,   6438146.28,   35.41
1537     405554.78,   6438148.78,   35.44
1538     405555.80,   6438149.79,   35.44
1539     405559.37,   6438153.31,   35.45
1540     405560.88,   6438154.81,   35.44
1541     405562.93,   6438156.83,   35.42
1542     405566.50,   6438160.35,   35.38
1543     405566.99,   6438160.83,   35.37
1544     ...
1545   END CROSS-SECTION
1546
1547    Convert to NetCDF pts format which is
1548
1549    points:  (Nx2) Float array
1550    elevation: N Float array
1551    """
1552
1553    import os
1554    from Scientific.IO.NetCDF import NetCDFFile
1555    from Numeric import Float, zeros, reshape
1556
1557    root = basename_in
1558
1559    #Get ASCII file
1560    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1561
1562    if verbose: print 'Reading HEC-RAS data from %s' %(root + '.sdf')
1563
1564    lines = infile.readlines()
1565    infile.close()
1566
1567    if verbose: print 'Converting to pts format'
1568
1569    i = 0
1570    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1571        i += 1
1572
1573    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1574    i += 1
1575
1576    assert lines[i].strip().upper().startswith('UNITS:')
1577    units = lines[i].strip().split()[1]
1578    i += 1
1579
1580    assert lines[i].strip().upper().startswith('DTM TYPE:')
1581    i += 1
1582
1583    assert lines[i].strip().upper().startswith('DTM:')
1584    i += 1
1585
1586    assert lines[i].strip().upper().startswith('STREAM')
1587    i += 1
1588
1589    assert lines[i].strip().upper().startswith('CROSS')
1590    i += 1
1591
1592    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1593    projection = lines[i].strip().split(':')[1]
1594    i += 1
1595
1596    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1597    zone = int(lines[i].strip().split(':')[1])
1598    i += 1
1599
1600    assert lines[i].strip().upper().startswith('DATUM:')
1601    datum = lines[i].strip().split(':')[1]
1602    i += 1
1603
1604    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1605    i += 1
1606
1607    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1608    i += 1
1609
1610    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1611    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1612    i += 1
1613
1614
1615    #Now read all points
1616    points = []
1617    elevation = []
1618    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1619        for k, entry in enumerate(entries):
1620            points.append(entry[:2])
1621            elevation.append(entry[2])
1622
1623
    msg = 'Actual number of cross sections == %d, reported as %d'\
          %(j+1, number_of_cross_sections)
1626    assert j+1 == number_of_cross_sections, msg
1627
1628    #Get output file
1629    if basename_out == None:
1630        ptsname = root + '.pts'
1631    else:
1632        ptsname = basename_out + '.pts'
1633
[4455]1634    geo_ref = Geo_reference(zone, 0, 0, datum, projection, units)
1635    geo = Geospatial_data(points, {"elevation":elevation},
1636                          verbose=verbose, geo_reference=geo_ref)
1637    geo.export_points_file(ptsname)
[2852]1638
[4462]1639def export_grid(basename_in, extra_name_out = None,
1640                quantities = None, # defaults to elevation
1641                timestep = None,
1642                reduction = None,
1643                cellsize = 10,
1644                NODATA_value = -9999,
1645                easting_min = None,
1646                easting_max = None,
1647                northing_min = None,
1648                northing_max = None,
1649                verbose = False,
1650                origin = None,
1651                datum = 'WGS84',
1652                format = 'ers'):
    """Wrapper for sww2dem - see sww2dem for details of most of the
    parameters.

    quantities is a list of quantities. Each quantity will be
    calculated for each sww file.

    One output file is produced for each input sww file and each
    quantity. The function returns the names of the files produced,
    i.e. the directory and the file name without the extension.
    """
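    # Usage sketch (hypothetical file names): assuming 'runup.sww' is in the
    # current directory, the call below would produce 'runup_stage.asc' and
    # 'runup_elevation.asc' (plus accompanying .prj files) on a 5 m grid:
    #
    #     export_grid('runup', quantities=['stage', 'elevation'],
    #                 cellsize=5, format='asc')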
1668   
1669    if quantities is None:
1670        quantities = ['elevation']
1671       
1672    if type(quantities) is str:
1673            quantities = [quantities]
1674
1675    # How many sww files are there?
1676    dir, base = os.path.split(basename_in)
[4489]1677    #print "basename_in",basename_in
1678    #print "base",base
[4586]1679
1680    iterate_over = get_all_swwfiles(dir,base,verbose)
[4463]1681   
[4526]1682    if dir == "":
1683        dir = "." # Unix compatibility
[4462]1684   
1685    files_out = []
[4586]1686    #print 'sww_file',iterate_over
[4548]1687    for sww_file in iterate_over:
[4462]1688        for quantity in quantities:
1689            if extra_name_out is None:
1690                basename_out = sww_file + '_' + quantity
1691            else:
1692                basename_out = sww_file + '_' + quantity + '_' \
1693                               + extra_name_out
[4524]1694#            print "basename_out", basename_out
[4462]1695       
[4524]1696            file_out = sww2dem(dir+sep+sww_file, dir+sep+basename_out,
[4462]1697                               quantity, 
1698                               timestep,
1699                               reduction,
1700                               cellsize,
1701                               NODATA_value,
1702                               easting_min,
1703                               easting_max,
1704                               northing_min,
1705                               northing_max,
1706                               verbose,
1707                               origin,
1708                               datum,
1709                               format)
1710            files_out.append(file_out)
1711    #print "basenames_out after",basenames_out
1712    return files_out
[4545]1713
1714
1715def get_timeseries(production_dirs, output_dir, scenario_name, gauges_dir_name,
1716                   plot_quantity, generate_fig = False,
1717                   reportname = None, surface = False, time_min = None,
1718                   time_max = None, title_on = False, verbose = True,
1719                   nodes=None):
1720    """
1721    nodes - number of processes used.
1722
1723    warning - this function has no tests
1724    """
1725    if reportname == None:
1726        report = False
1727    else:
1728        report = True
1729       
1730    if nodes is None:
1731        is_parallel = False
1732    else:
1733        is_parallel = True
1734       
1735    # Generate figures
1736    swwfiles = {}
[4462]1737   
[4545]1738    if is_parallel is True:   
1739        for i in range(nodes):
1740            print 'Sending node %d of %d' %(i,nodes)
1741            swwfiles = {}
            if reportname is not None:
                # Tag the report name with the node number
                reportname = reportname + '_%s' %(i)
1744            for label_id in production_dirs.keys():
1745                if label_id == 'boundaries':
1746                    swwfile = best_boundary_sww
1747                else:
1748                    file_loc = output_dir + label_id + sep
1749                    sww_extra = '_P%s_%s' %(i,nodes)
1750                    swwfile = file_loc + scenario_name + sww_extra + '.sww'
1751                    print 'swwfile',swwfile
1752                    swwfiles[swwfile] = label_id
1753
1754                texname, elev_output = sww2timeseries(swwfiles,
1755                                              gauges_dir_name,
1756                                              production_dirs,
1757                                              report = report,
1758                                              reportname = reportname,
1759                                              plot_quantity = plot_quantity,
1760                                              generate_fig = generate_fig,
1761                                              surface = surface,
1762                                              time_min = time_min,
1763                                              time_max = time_max,
1764                                              title_on = title_on,
1765                                              verbose = verbose)
1766    else:   
1767        for label_id in production_dirs.keys():       
1768            if label_id == 'boundaries':
1769                print 'boundaries'
1770                file_loc = project.boundaries_in_dir
1771                swwfile = project.boundaries_dir_name3 + '.sww'
1772                #  swwfile = boundary_dir_filename
1773            else:
1774                file_loc = output_dir + label_id + sep
1775                swwfile = file_loc + scenario_name + '.sww'
1776            swwfiles[swwfile] = label_id
1777       
1778        texname, elev_output = sww2timeseries(swwfiles,
1779                                              gauges_dir_name,
1780                                              production_dirs,
1781                                              report = report,
1782                                              reportname = reportname,
1783                                              plot_quantity = plot_quantity,
1784                                              generate_fig = generate_fig,
1785                                              surface = surface,
1786                                              time_min = time_min,
1787                                              time_max = time_max,
1788                                              title_on = title_on,
1789                                              verbose = verbose)
1790                                         
1791
1792   
[2852]1793def sww2dem(basename_in, basename_out = None,
[4462]1794            quantity = None, # defaults to elevation
[2852]1795            timestep = None,
1796            reduction = None,
1797            cellsize = 10,
1798            NODATA_value = -9999,
1799            easting_min = None,
1800            easting_max = None,
1801            northing_min = None,
1802            northing_max = None,
1803            verbose = False,
1804            origin = None,
1805            datum = 'WGS84',
[4462]1806            format = 'ers'):
[2852]1807
    """Read SWW file and convert to Digital Elevation Model format
1809    (.asc or .ers)
[2852]1810
1811    Example (ASC):
1812
1813    ncols         3121
1814    nrows         1800
1815    xllcorner     722000
1816    yllcorner     5893000
1817    cellsize      25
1818    NODATA_value  -9999
1819    138.3698 137.4194 136.5062 135.5558 ..........
1820
1821    Also write accompanying file with same basename_in but extension .prj
1822    used to fix the UTM zone, datum, false northings and eastings.
1823
1824    The prj format is assumed to be as
1825
1826    Projection    UTM
1827    Zone          56
1828    Datum         WGS84
1829    Zunits        NO
1830    Units         METERS
1831    Spheroid      WGS84
1832    Xshift        0.0000000000
1833    Yshift        10000000.0000000000
1834    Parameters
1835
1836
1837    The parameter quantity must be the name of an existing quantity or
1838    an expression involving existing quantities. The default is
[4462]1839    'elevation'. Quantity is not a list of quantities.
[2852]1840
1841    if timestep (an index) is given, output quantity at that timestep
1842
1843    if reduction is given use that to reduce quantity over all timesteps.
1844
1845    datum
1846
1847    format can be either 'asc' or 'ers'
1848    """
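    # Usage sketch (hypothetical file names): assuming 'model.sww' exists, the
    # call below would interpolate the maximum stage over all timesteps onto a
    # 10 m grid and write 'model_maxstage.asc' plus the accompanying .prj file:
    #
    #     sww2dem('model', basename_out='model_maxstage',
    #             quantity='stage', reduction=max, cellsize=10,
    #             format='asc')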
1849
1850    import sys
[4663]1851    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, \
1852         sometrue
[2852]1853    from Numeric import array2string
1854
[4663]1855    from anuga.utilities.polygon import inside_polygon, outside_polygon, \
1856         separate_points_by_polygon
1857    from anuga.abstract_2d_finite_volumes.util import \
1858         apply_expression_to_dictionary
[2852]1859
1860    msg = 'Format must be either asc or ers'
1861    assert format.lower() in ['asc', 'ers'], msg
1862
1863
1864    false_easting = 500000
1865    false_northing = 10000000
1866
1867    if quantity is None:
1868        quantity = 'elevation'
[4462]1869       
[2852]1870    if reduction is None:
1871        reduction = max
1872
1873    if basename_out is None:
1874        basename_out = basename_in + '_%s' %quantity
1875
[4462]1876    if quantity_formula.has_key(quantity):
1877        quantity = quantity_formula[quantity]
1878       
[2852]1879    swwfile = basename_in + '.sww'
1880    demfile = basename_out + '.' + format
1881    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option)
[4413]1883   
[4548]1884    #if verbose: bye= nsuadsfd[0] # uncomment to check catching verbose errors
[4413]1885   
[4551]1886    # Read sww file
[4524]1887    if verbose: 
1888        print 'Reading from %s' %swwfile
1889        print 'Output directory is %s' %basename_out
1890   
[2852]1891    from Scientific.IO.NetCDF import NetCDFFile
1892    fid = NetCDFFile(swwfile)
1893
1894    #Get extent and reference
1895    x = fid.variables['x'][:]
1896    y = fid.variables['y'][:]
1897    volumes = fid.variables['volumes'][:]
[3961]1898    if timestep is not None:
1899        times = fid.variables['time'][timestep]
[3967]1900    else:
1901        times = fid.variables['time'][:]
[2852]1902
1903    number_of_timesteps = fid.dimensions['number_of_timesteps']
1904    number_of_points = fid.dimensions['number_of_points']
[3967]1905   
[2852]1906    if origin is None:
1907
[4551]1908        # Get geo_reference
1909        # sww files don't have to have a geo_ref
[2852]1910        try:
1911            geo_reference = Geo_reference(NetCDFObject=fid)
1912        except AttributeError, e:
[4551]1913            geo_reference = Geo_reference() # Default georef object
[2852]1914
1915        xllcorner = geo_reference.get_xllcorner()
1916        yllcorner = geo_reference.get_yllcorner()
1917        zone = geo_reference.get_zone()
1918    else:
1919        zone = origin[0]
1920        xllcorner = origin[1]
1921        yllcorner = origin[2]
1922
1923
1924
[4663]1925    # FIXME: Refactor using code from Interpolation_function.statistics
1926    # (in interpolate.py)
[4551]1927    # Something like print swwstats(swwname)
[2852]1928    if verbose:
1929        print '------------------------------------------------'
1930        print 'Statistics of SWW file:'
1931        print '  Name: %s' %swwfile
1932        print '  Reference:'
1933        print '    Lower left corner: [%f, %f]'\
1934              %(xllcorner, yllcorner)
[3961]1935        if timestep is not None:
1936            print '    Time: %f' %(times)
1937        else:
1938            print '    Start time: %f' %fid.starttime[0]
[2852]1939        print '  Extent:'
1940        print '    x [m] in [%f, %f], len(x) == %d'\
1941              %(min(x.flat), max(x.flat), len(x.flat))
1942        print '    y [m] in [%f, %f], len(y) == %d'\
1943              %(min(y.flat), max(y.flat), len(y.flat))
[3961]1944        if timestep is not None:
1945            print '    t [s] = %f, len(t) == %d' %(times, 1)
1946        else:
1947            print '    t [s] in [%f, %f], len(t) == %d'\
[3967]1948              %(min(times), max(times), len(times))
[2852]1949        print '  Quantities [SI units]:'
[5189]1950        # Comment out for reduced memory consumption
[3967]1951        for name in ['stage', 'xmomentum', 'ymomentum']:
[2852]1952            q = fid.variables[name][:].flat
[3967]1953            if timestep is not None:
1954                q = q[timestep*len(x):(timestep+1)*len(x)]
1955            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1956        for name in ['elevation']:
1957            q = fid.variables[name][:].flat
1958            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1959           
[2891]1960    # Get quantity and reduce if applicable
[2852]1961    if verbose: print 'Processing quantity %s' %quantity
1962
[2891]1963    # Turn NetCDF objects into Numeric arrays
[5115]1964    try:
1965        q = fid.variables[quantity][:] 
[5189]1966       
1967       
[5115]1968    except:
1969        quantity_dict = {}
1970        for name in fid.variables.keys():
1971            quantity_dict[name] = fid.variables[name][:] 
1972        #Convert quantity expression to quantities found in sww file   
1973        q = apply_expression_to_dictionary(quantity, quantity_dict)
[5189]1974    #print "q.shape",q.shape
[2852]1975    if len(q.shape) == 2:
1976        #q has a time component and needs to be reduced along
1977        #the temporal dimension
1978        if verbose: print 'Reducing quantity %s' %quantity
1979        q_reduced = zeros( number_of_points, Float )
[3967]1980       
1981        if timestep is not None:
1982            for k in range(number_of_points):
1983                q_reduced[k] = q[timestep,k] 
1984        else:
1985            for k in range(number_of_points):
1986                q_reduced[k] = reduction( q[:,k] )
[2852]1987
1988        q = q_reduced
1989
1990    #Post condition: Now q has dimension: number_of_points
1991    assert len(q.shape) == 1
1992    assert q.shape[0] == number_of_points
1993
1994    if verbose:
1995        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1996
1997
1998    #Create grid and update xll/yll corner and x,y
1999
2000    #Relative extent
2001    if easting_min is None:
2002        xmin = min(x)
2003    else:
2004        xmin = easting_min - xllcorner
2005
2006    if easting_max is None:
2007        xmax = max(x)
2008    else:
2009        xmax = easting_max - xllcorner
2010
2011    if northing_min is None:
2012        ymin = min(y)
2013    else:
2014        ymin = northing_min - yllcorner
2015
2016    if northing_max is None:
2017        ymax = max(y)
2018    else:
2019        ymax = northing_max - yllcorner
2020
2021
2022
2023    if verbose: print 'Creating grid'
2024    ncols = int((xmax-xmin)/cellsize)+1
2025    nrows = int((ymax-ymin)/cellsize)+1
2026
2027
2028    #New absolute reference and coordinates
2029    newxllcorner = xmin+xllcorner
2030    newyllcorner = ymin+yllcorner
2031
2032    x = x+xllcorner-newxllcorner
2033    y = y+yllcorner-newyllcorner
[3961]2034   
[2852]2035    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2036    assert len(vertex_points.shape) == 2
2037
2038    grid_points = zeros ( (ncols*nrows, 2), Float )
2039
2040
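    # Fill grid_points row by row: index k = i*ncols + j maps grid row i,
    # column j to the point (xg, yg), measured relative to the new lower
    # left corner. For 'ers' output the y ordering is flipped below.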
2041    for i in xrange(nrows):
2042        if format.lower() == 'asc':
2043            yg = i*cellsize
        else:
            # This will flip the order of the y values for ers
            yg = (nrows-i)*cellsize
2047
2048        for j in xrange(ncols):
2049            xg = j*cellsize
2050            k = i*ncols + j
2051
2052            grid_points[k,0] = xg
2053            grid_points[k,1] = yg
2054
2055    #Interpolate
[3514]2056    from anuga.fit_interpolate.interpolate import Interpolate
[2852]2057
[4480]2058    # Remove loners from vertex_points, volumes here
2059    vertex_points, volumes = remove_lone_verts(vertex_points, volumes)
[4497]2060    #export_mesh_file('monkey.tsh',{'vertices':vertex_points, 'triangles':volumes})
[4522]2061    #import sys; sys.exit()
[2852]2062    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2063
2064    #Interpolate using quantity values
2065    if verbose: print 'Interpolating'
2066    grid_values = interp.interpolate(q, grid_points).flat
2067
2068
2069    if verbose:
2070        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2071                                                      max(grid_values))
2072
2073    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2074    P = interp.mesh.get_boundary_polygon()
2075    outside_indices = outside_polygon(grid_points, P, closed=True)
2076
2077    for i in outside_indices:
2078        grid_values[i] = NODATA_value
2079
2080
2081
2082
2083    if format.lower() == 'ers':
2084        # setup ERS header information
2085        grid_values = reshape(grid_values,(nrows, ncols))
2086        header = {}
2087        header['datum'] = '"' + datum + '"'
2088        # FIXME The use of hardwired UTM and zone number needs to be made optional
2089        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2090        header['projection'] = '"UTM-' + str(zone) + '"'
2091        header['coordinatetype'] = 'EN'
2092        if header['coordinatetype'] == 'LL':
2093            header['longitude'] = str(newxllcorner)
2094            header['latitude'] = str(newyllcorner)
2095        elif header['coordinatetype'] == 'EN':
2096            header['eastings'] = str(newxllcorner)
2097            header['northings'] = str(newyllcorner)
2098        header['nullcellvalue'] = str(NODATA_value)
2099        header['xdimension'] = str(cellsize)
2100        header['ydimension'] = str(cellsize)
2101        header['value'] = '"' + quantity + '"'
2102        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2103
2104
2105        #Write
2106        if verbose: print 'Writing %s' %demfile
2107        import ermapper_grids
2108        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2109
        fid.close()
        return basename_out
2111    else:
2112        #Write to Ascii format
2113
2114        #Write prj file
2115        prjfile = basename_out + '.prj'
2116
2117        if verbose: print 'Writing %s' %prjfile
2118        prjid = open(prjfile, 'w')
2119        prjid.write('Projection    %s\n' %'UTM')
2120        prjid.write('Zone          %d\n' %zone)
2121        prjid.write('Datum         %s\n' %datum)
2122        prjid.write('Zunits        NO\n')
2123        prjid.write('Units         METERS\n')
2124        prjid.write('Spheroid      %s\n' %datum)
2125        prjid.write('Xshift        %d\n' %false_easting)
2126        prjid.write('Yshift        %d\n' %false_northing)
2127        prjid.write('Parameters\n')
2128        prjid.close()
2129
2130
2131
2132        if verbose: print 'Writing %s' %demfile
2133
2134        ascid = open(demfile, 'w')
2135
2136        ascid.write('ncols         %d\n' %ncols)
2137        ascid.write('nrows         %d\n' %nrows)
2138        ascid.write('xllcorner     %d\n' %newxllcorner)
2139        ascid.write('yllcorner     %d\n' %newyllcorner)
2140        ascid.write('cellsize      %f\n' %cellsize)
2141        ascid.write('NODATA_value  %d\n' %NODATA_value)
2142
2143
2144        #Get bounding polygon from mesh
2145        #P = interp.mesh.get_boundary_polygon()
2146        #inside_indices = inside_polygon(grid_points, P)
2147
2148        for i in range(nrows):
2149            if verbose and i%((nrows+10)/10)==0:
2150                print 'Doing row %d of %d' %(i, nrows)
2151
2152            base_index = (nrows-i-1)*ncols
2153
2154            slice = grid_values[base_index:base_index+ncols]
2155            s = array2string(slice, max_line_width=sys.maxint)
2156            ascid.write(s[1:-1] + '\n')
2157
2158
2159            #print
2160            #for j in range(ncols):
2161            #    index = base_index+j#
2162            #    print grid_values[index],
2163            #    ascid.write('%f ' %grid_values[index])
2164            #ascid.write('\n')
2165
2166        #Close
2167        ascid.close()
2168        fid.close()
[4462]2169        return basename_out
2170
[5189]2171
[2852]2172#Backwards compatibility
2173def sww2asc(basename_in, basename_out = None,
2174            quantity = None,
2175            timestep = None,
2176            reduction = None,
2177            cellsize = 10,
2178            verbose = False,
2179            origin = None):
2180    print 'sww2asc will soon be obsoleted - please use sww2dem'
2181    sww2dem(basename_in,
2182            basename_out = basename_out,
2183            quantity = quantity,
2184            timestep = timestep,
2185            reduction = reduction,
2186            cellsize = cellsize,
2187            verbose = verbose,
2188            origin = origin,
            datum = 'WGS84',
            format = 'asc')
2191
2192def sww2ers(basename_in, basename_out = None,
2193            quantity = None,
2194            timestep = None,
2195            reduction = None,
2196            cellsize = 10,
2197            verbose = False,
2198            origin = None,
2199            datum = 'WGS84'):
2200    print 'sww2ers will soon be obsoleted - please use sww2dem'
2201    sww2dem(basename_in,
2202            basename_out = basename_out,
2203            quantity = quantity,
2204            timestep = timestep,
2205            reduction = reduction,
2206            cellsize = cellsize,
2207            verbose = verbose,
2208            origin = origin,
[2891]2209            datum = datum,
2210            format = 'ers')
[2852]2211################################# END COMPATIBILITY ##############
2212
2213
2214
[2891]2215def sww2pts(basename_in, basename_out=None,
2216            data_points=None,
2217            quantity=None,
2218            timestep=None,
2219            reduction=None,
2220            NODATA_value=-9999,
2221            verbose=False,
2222            origin=None):
2223            #datum = 'WGS84')
2224
2225
2226    """Read SWW file and convert to interpolated values at selected points
2227
    The parameter 'quantity' must be the name of an existing quantity or
2229    an expression involving existing quantities. The default is
2230    'elevation'.
2231
2232    if timestep (an index) is given, output quantity at that timestep
2233
2234    if reduction is given use that to reduce quantity over all timesteps.
2235
    data_points (Nx2 array) gives the locations of points at which quantity is to be computed.
2237   
2238    """
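    # Usage sketch (hypothetical names): assuming 'model.sww' exists and
    # gauges is an Nx2 array of point coordinates in the same frame as the
    # sww mesh vertices, the call below would write the maximum stage at
    # those locations to 'model_gauges.pts':
    #
    #     gauges = [[1000.0, 2000.0], [1500.0, 2250.0]]
    #     sww2pts('model', basename_out='model_gauges',
    #             data_points=gauges, quantity='stage', reduction=max)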
2239
2240    import sys
2241    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2242    from Numeric import array2string
2243
[3514]2244    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
[3560]2245    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
[2891]2246
[3514]2247    from anuga.geospatial_data.geospatial_data import Geospatial_data
[2891]2248
2249    if quantity is None:
2250        quantity = 'elevation'
2251
2252    if reduction is None:
2253        reduction = max
2254
2255    if basename_out is None:
2256        basename_out = basename_in + '_%s' %quantity
2257
2258    swwfile = basename_in + '.sww'
2259    ptsfile = basename_out + '.pts'
2260
2261    # Read sww file
2262    if verbose: print 'Reading from %s' %swwfile
2263    from Scientific.IO.NetCDF import NetCDFFile
2264    fid = NetCDFFile(swwfile)
2265
2266    # Get extent and reference
2267    x = fid.variables['x'][:]
2268    y = fid.variables['y'][:]
2269    volumes = fid.variables['volumes'][:]
2270
2271    number_of_timesteps = fid.dimensions['number_of_timesteps']
2272    number_of_points = fid.dimensions['number_of_points']
2273    if origin is None:
2274
2275        # Get geo_reference
2276        # sww files don't have to have a geo_ref
2277        try:
2278            geo_reference = Geo_reference(NetCDFObject=fid)
2279        except AttributeError, e:
2280            geo_reference = Geo_reference() #Default georef object
2281
2282        xllcorner = geo_reference.get_xllcorner()
2283        yllcorner = geo_reference.get_yllcorner()
2284        zone = geo_reference.get_zone()
2285    else:
2286        zone = origin[0]
2287        xllcorner = origin[1]
2288        yllcorner = origin[2]
2289
2290
2291
2292    # FIXME: Refactor using code from file_function.statistics
2293    # Something like print swwstats(swwname)
2294    if verbose:
2295        x = fid.variables['x'][:]
2296        y = fid.variables['y'][:]
2297        times = fid.variables['time'][:]
2298        print '------------------------------------------------'
2299        print 'Statistics of SWW file:'
2300        print '  Name: %s' %swwfile
2301        print '  Reference:'
2302        print '    Lower left corner: [%f, %f]'\
2303              %(xllcorner, yllcorner)
2304        print '    Start time: %f' %fid.starttime[0]
2305        print '  Extent:'
2306        print '    x [m] in [%f, %f], len(x) == %d'\
2307              %(min(x.flat), max(x.flat), len(x.flat))
2308        print '    y [m] in [%f, %f], len(y) == %d'\
2309              %(min(y.flat), max(y.flat), len(y.flat))
2310        print '    t [s] in [%f, %f], len(t) == %d'\
2311              %(min(times), max(times), len(times))
2312        print '  Quantities [SI units]:'
2313        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2314            q = fid.variables[name][:].flat
2315            print '    %s in [%f, %f]' %(name, min(q), max(q))
2316
2317
2318
2319    # Get quantity and reduce if applicable
2320    if verbose: print 'Processing quantity %s' %quantity
2321
2322    # Turn NetCDF objects into Numeric arrays
2323    quantity_dict = {}
2324    for name in fid.variables.keys():
2325        quantity_dict[name] = fid.variables[name][:]
2326
2327
2328
2329    # Convert quantity expression to quantities found in sww file   
2330    q = apply_expression_to_dictionary(quantity, quantity_dict)
2331
2332
2333
2334    if len(q.shape) == 2:
2335        # q has a time component and needs to be reduced along
2336        # the temporal dimension
2337        if verbose: print 'Reducing quantity %s' %quantity
2338        q_reduced = zeros( number_of_points, Float )
2339
2340        for k in range(number_of_points):
2341            q_reduced[k] = reduction( q[:,k] )
2342
2343        q = q_reduced
2344
2345    # Post condition: Now q has dimension: number_of_points
2346    assert len(q.shape) == 1
2347    assert q.shape[0] == number_of_points
2348
2349
2350    if verbose:
2351        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2352
2353
2354    # Create grid and update xll/yll corner and x,y
2355    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2356    assert len(vertex_points.shape) == 2
2357
2358    # Interpolate
[3514]2359    from anuga.fit_interpolate.interpolate import Interpolate
[2891]2360    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2361
2362    # Interpolate using quantity values
2363    if verbose: print 'Interpolating'
2364    interpolated_values = interp.interpolate(q, data_points).flat
2365
2366
2367    if verbose:
2368        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2369                                                      max(interpolated_values))
2370
2371    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2372    P = interp.mesh.get_boundary_polygon()
2373    outside_indices = outside_polygon(data_points, P, closed=True)
2374
2375    for i in outside_indices:
2376        interpolated_values[i] = NODATA_value
2377
2378
2379    # Store results   
2380    G = Geospatial_data(data_points=data_points,
2381                        attributes=interpolated_values)
2382
2383    G.export_points_file(ptsfile, absolute = True)
2384
[2931]2385    fid.close()
[2891]2386
2387
[2852]2388def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2389                                  use_cache = False,
2390                                  verbose = False):
    """Read Digital Elevation Model from the following ASCII format (.asc)
2392
2393    Example:
2394
2395    ncols         3121
2396    nrows         1800
2397    xllcorner     722000
2398    yllcorner     5893000
2399    cellsize      25
2400    NODATA_value  -9999
2401    138.3698 137.4194 136.5062 135.5558 ..........
2402
2403    Convert basename_in + '.asc' to NetCDF format (.dem)
2404    mimicking the ASCII format closely.
2405
2406
2407    An accompanying file with same basename_in but extension .prj must exist
2408    and is used to fix the UTM zone, datum, false northings and eastings.
2409
2410    The prj format is assumed to be as
2411
2412    Projection    UTM
2413    Zone          56
2414    Datum         WGS84
2415    Zunits        NO
2416    Units         METERS
2417    Spheroid      WGS84
2418    Xshift        0.0000000000
2419    Yshift        10000000.0000000000
2420    Parameters
2421    """
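    # Usage sketch (hypothetical file names): assuming 'topo.asc' and
    # 'topo.prj' exist, the call below would write 'topo.dem' and cache the
    # result so that repeated conversions of unchanged input are fast:
    #
    #     convert_dem_from_ascii2netcdf('topo', use_cache=True, verbose=True)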
2422
2423
2424
2425    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2426
2427    if use_cache is True:
2428        from caching import cache
2429        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
[4688]2430                       dependencies = [basename_in + '.asc',
2431                                       basename_in + '.prj'],
[2852]2432                       verbose = verbose)
2433
2434    else:
2435        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2436
2437    return result
2438
2439
2440
2441
2442
2443
2444def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2445                                  verbose = False):
    """Read Digital Elevation Model from the following ASCII format (.asc)
2447
2448    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2449    """
2450
2451    import os
2452    from Scientific.IO.NetCDF import NetCDFFile
2453    from Numeric import Float, array
2454
2455    #root, ext = os.path.splitext(basename_in)
2456    root = basename_in
2457
2458    ###########################################
2459    # Read Meta data
2460    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2461    metadatafile = open(root + '.prj')
2462    metalines = metadatafile.readlines()
2463    metadatafile.close()
2464
2465    L = metalines[0].strip().split()
2466    assert L[0].strip().lower() == 'projection'
2467    projection = L[1].strip()                   #TEXT
2468
2469    L = metalines[1].strip().split()
2470    assert L[0].strip().lower() == 'zone'
2471    zone = int(L[1].strip())
2472
2473    L = metalines[2].strip().split()
2474    assert L[0].strip().lower() == 'datum'
2475    datum = L[1].strip()                        #TEXT
2476
2477    L = metalines[3].strip().split()
2478    assert L[0].strip().lower() == 'zunits'     #IGNORE
2479    zunits = L[1].strip()                       #TEXT
2480
2481    L = metalines[4].strip().split()
2482    assert L[0].strip().lower() == 'units'
2483    units = L[1].strip()                        #TEXT
2484
2485    L = metalines[5].strip().split()
2486    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2487    spheroid = L[1].strip()                     #TEXT
2488
2489    L = metalines[6].strip().split()
2490    assert L[0].strip().lower() == 'xshift'
2491    false_easting = float(L[1].strip())
2492
2493    L = metalines[7].strip().split()
2494    assert L[0].strip().lower() == 'yshift'
2495    false_northing = float(L[1].strip())
2496
2497    #print false_easting, false_northing, zone, datum
2498
2499
2500    ###########################################
2501    #Read DEM data
2502
2503    datafile = open(basename_in + '.asc')
2504
2505    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2506    lines = datafile.readlines()
2507    datafile.close()
2508
2509    if verbose: print 'Got', len(lines), ' lines'
2510
2511    ncols = int(lines[0].split()[1].strip())
2512    nrows = int(lines[1].split()[1].strip())
[4824]2513
2514    # Do cellsize (line 4) before line 2 and 3
2515    cellsize = float(lines[4].split()[1].strip())   
2516
2517    # Checks suggested by Joaquim Luis
2518    # Our internal representation of xllcorner
2519    # and yllcorner is non-standard.
2520    xref = lines[2].split()
2521    if xref[0].strip() == 'xllcorner':
2522        xllcorner = float(xref[1].strip()) # + 0.5*cellsize # Correct offset
2523    elif xref[0].strip() == 'xllcenter':
2524        xllcorner = float(xref[1].strip())
2525    else:
2526        msg = 'Unknown keyword: %s' %xref[0].strip()
2527        raise Exception, msg
2528
2529    yref = lines[3].split()
2530    if yref[0].strip() == 'yllcorner':
2531        yllcorner = float(yref[1].strip()) # + 0.5*cellsize # Correct offset
2532    elif yref[0].strip() == 'yllcenter':
2533        yllcorner = float(yref[1].strip())
2534    else:
2535        msg = 'Unknown keyword: %s' %yref[0].strip()
2536        raise Exception, msg
2537       
2538
[2852]2539    NODATA_value = int(lines[5].split()[1].strip())
2540
2541    assert len(lines) == nrows + 6
2542
2543
2544    ##########################################
2545
2546
2547    if basename_out == None:
2548        netcdfname = root + '.dem'
2549    else:
2550        netcdfname = basename_out + '.dem'
2551
2552    if verbose: print 'Store to NetCDF file %s' %netcdfname
2553    # NetCDF file definition
2554    fid = NetCDFFile(netcdfname, 'w')
2555
2556    #Create new file
2557    fid.institution = 'Geoscience Australia'
2558    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2559                      'of spatial point data'
2560
2561    fid.ncols = ncols
2562    fid.nrows = nrows
2563    fid.xllcorner = xllcorner
2564    fid.yllcorner = yllcorner
2565    fid.cellsize = cellsize
2566    fid.NODATA_value = NODATA_value
2567
2568    fid.zone = zone
2569    fid.false_easting = false_easting
2570    fid.false_northing = false_northing
2571    fid.projection = projection
2572    fid.datum = datum
2573    fid.units = units
2574
2575
2576    # dimension definitions
2577    fid.createDimension('number_of_rows', nrows)
2578    fid.createDimension('number_of_columns', ncols)
2579
2580    # variable definitions
2581    fid.createVariable('elevation', Float, ('number_of_rows',
2582                                            'number_of_columns'))
2583
2584    # Get handles to the variables
2585    elevation = fid.variables['elevation']
2586
2587    #Store data
2588    n = len(lines[6:])
2589    for i, line in enumerate(lines[6:]):
2590        fields = line.split()
2591        if verbose and i%((n+10)/10)==0:
2592            print 'Processing row %d of %d' %(i, nrows)
2593
2594        elevation[i, :] = array([float(x) for x in fields])
2595
2596    fid.close()
2597
2598
2599
2600
2601
2602def ferret2sww(basename_in, basename_out = None,
2603               verbose = False,
2604               minlat = None, maxlat = None,
2605               minlon = None, maxlon = None,
2606               mint = None, maxt = None, mean_stage = 0,
2607               origin = None, zscale = 1,
2608               fail_on_NaN = True,
2609               NaN_filler = 0,
2610               elevation = None,
[3694]2611               inverted_bathymetry = True
[2852]2612               ): #FIXME: Bathymetry should be obtained
2613                                  #from MOST somehow.
2614                                  #Alternatively from elsewhere
2615                                  #or, as a last resort,
2616                                  #specified here.
2617                                  #The value of -100 will work
2618                                  #for the Wollongong tsunami
2619                                  #scenario but is very hacky
2620    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
[3560]2621    sww format native to abstract_2d_finite_volumes.
[2852]2622
2623    Specify only basename_in and read files of the form
2624    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2625    relative height, x-velocity and y-velocity, respectively.
2626
2627    Also convert latitude and longitude to UTM. All coordinates are
2628    assumed to be given in the GDA94 datum.
2629
    min's and max's: If omitted, the full extent is used.
    To include a value, min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
2633
2634    origin is a 3-tuple with geo referenced
2635    UTM coordinates (zone, easting, northing)
2636
2637    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2638    which means that longitude is the fastest
2639    varying dimension (row major order, so to speak)
2640
2641    ferret2sww uses grid points as vertices in a triangular grid
2642    counting vertices from lower left corner upwards, then right
2643    """
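    # Usage sketch (hypothetical file names): assuming 'most_output_ha.nc',
    # 'most_output_ua.nc', 'most_output_va.nc' and 'most_output_e.nc' exist,
    # the call below would crop to the given lat/lon window and write
    # 'most_boundary.sww':
    #
    #     ferret2sww('most_output', basename_out='most_boundary',
    #                minlat=-21.0, maxlat=-20.0,
    #                minlon=114.0, maxlon=115.0,
    #                mean_stage=0.0, zscale=1)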
2644
2645    import os
2646    from Scientific.IO.NetCDF import NetCDFFile
2647    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2648    from Numeric import allclose, around
2649
2650    precision = Float
2651
2652    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2653
2654    if minlat != None:
2655        assert -90 < minlat < 90 , msg
2656    if maxlat != None:
2657        assert -90 < maxlat < 90 , msg
[4050]2658        if minlat != None:
2659            assert maxlat > minlat
[2852]2660    if minlon != None:
2661        assert -180 < minlon < 180 , msg
2662    if maxlon != None:
2663        assert -180 < maxlon < 180 , msg
[4050]2664        if minlon != None:
2665            assert maxlon > minlon
2666       
[2852]2667
2668
2669    #Get NetCDF data
2670    if verbose: print 'Reading files %s_*.nc' %basename_in
[3694]2671    #print "basename_in + '_ha.nc'",basename_in + '_ha.nc'
[2852]2672    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2673    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2674    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2675    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2676
2677    if basename_out is None:
2678        swwname = basename_in + '.sww'
2679    else:
2680        swwname = basename_out + '.sww'
2681
[4418]2682    # Get dimensions of file_h
[2852]2683    for dimension in file_h.dimensions.keys():
2684        if dimension[:3] == 'LON':
2685            dim_h_longitude = dimension
2686        if dimension[:3] == 'LAT':
2687            dim_h_latitude = dimension
2688        if dimension[:4] == 'TIME':
2689            dim_h_time = dimension
2690
2691#    print 'long:', dim_h_longitude
2692#    print 'lats:', dim_h_latitude
2693#    print 'times:', dim_h_time
2694
2695    times = file_h.variables[dim_h_time]
2696    latitudes = file_h.variables[dim_h_latitude]
2697    longitudes = file_h.variables[dim_h_longitude]
[5347]2698
2699    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
2700                                                  longitudes[:],
2701                                                  minlat, maxlat,
2702                                                  minlon, maxlon)
[4418]2703    # get dimensions for file_e
[2852]2704    for dimension in file_e.dimensions.keys():
2705        if dimension[:3] == 'LON':
2706            dim_e_longitude = dimension
2707        if dimension[:3] == 'LAT':
2708            dim_e_latitude = dimension
2709
[4418]2710    # get dimensions for file_u
[2852]2711    for dimension in file_u.dimensions.keys():
2712        if dimension[:3] == 'LON':
2713            dim_u_longitude = dimension
2714        if dimension[:3] == 'LAT':
2715            dim_u_latitude = dimension
2716        if dimension[:4] == 'TIME':
2717            dim_u_time = dimension
2718           
[4418]2719    # get dimensions for file_v
[2852]2720    for dimension in file_v.dimensions.keys():
2721        if dimension[:3] == 'LON':
2722            dim_v_longitude = dimension
2723        if dimension[:3] == 'LAT':
2724            dim_v_latitude = dimension
2725        if dimension[:4] == 'TIME':
2726            dim_v_time = dimension
2727
2728
[4418]2729    # Precision used by most for lat/lon is 4 or 5 decimals
[2852]2730    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2731    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2732
[4418]2733    # Check that files are compatible
[2852]2734    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2735    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2736    assert allclose(latitudes, e_lat)
2737
2738    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2739    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2740    assert allclose(longitudes, e_lon)
2741
[4418]2742    if mint is None:
[2852]2743        jmin = 0
[4418]2744        mint = times[0]       
[2852]2745    else:
2746        jmin = searchsorted(times, mint)
[4024]2747       
[4418]2748    if maxt is None:
2749        jmax = len(times)
2750        maxt = times[-1]
[2852]2751    else:
2752        jmax = searchsorted(times, maxt)
2753
[4037]2754    #print "latitudes[:]",latitudes[:]
2755    #print "longitudes[:]",longitudes [:]
[4024]2756    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
2757                                                  longitudes[:],
[4418]2758                                                  minlat, maxlat,
2759                                                  minlon, maxlon)
[2852]2760
2761
2762    times = times[jmin:jmax]
2763    latitudes = latitudes[kmin:kmax]
2764    longitudes = longitudes[lmin:lmax]
2765
[4037]2766    #print "latitudes[:]",latitudes[:]
2767    #print "longitudes[:]",longitudes [:]
[2852]2768
2769    if verbose: print 'cropping'
2770    zname = 'ELEVATION'
2771
2772    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2773    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2774    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2775    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2776
2777    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2778    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2779    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2780    #        from Numeric import asarray
2781    #        elevations=elevations.tolist()
2782    #        elevations.reverse()
2783    #        elevations=asarray(elevations)
2784    #    else:
2785    #        from Numeric import asarray
2786    #        elevations=elevations.tolist()
2787    #        elevations.reverse()
2788    #        elevations=asarray(elevations)
2789    #        'print hmmm'
2790
2791
2792
2793    #Get missing values
2794    nan_ha = file_h.variables['HA'].missing_value[0]
2795    nan_ua = file_u.variables['UA'].missing_value[0]
2796    nan_va = file_v.variables['VA'].missing_value[0]
2797    if hasattr(file_e.variables[zname],'missing_value'):
2798        nan_e  = file_e.variables[zname].missing_value[0]
2799    else:
2800        nan_e = None
2801
2802    #Cleanup
2803    from Numeric import sometrue
2804
2805    missing = (amplitudes == nan_ha)
2806    if sometrue (missing):
2807        if fail_on_NaN:
2808            msg = 'NetCDFFile %s contains missing values'\
2809                  %(basename_in+'_ha.nc')
2810            raise DataMissingValuesError, msg
2811        else:
2812            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2813
2814    missing = (uspeed == nan_ua)
2815    if sometrue (missing):
2816        if fail_on_NaN:
2817            msg = 'NetCDFFile %s contains missing values'\
2818                  %(basename_in+'_ua.nc')
2819            raise DataMissingValuesError, msg
2820        else:
2821            uspeed = uspeed*(missing==0) + missing*NaN_filler
2822
2823    missing = (vspeed == nan_va)
2824    if sometrue (missing):
2825        if fail_on_NaN:
2826            msg = 'NetCDFFile %s contains missing values'\
2827                  %(basename_in+'_va.nc')
2828            raise DataMissingValuesError, msg
2829        else:
2830            vspeed = vspeed*(missing==0) + missing*NaN_filler
2831
2832
2833    missing = (elevations == nan_e)
2834    if sometrue (missing):
2835        if fail_on_NaN:
2836            msg = 'NetCDFFile %s contains missing values'\
2837                  %(basename_in+'_e.nc')
2838            raise DataMissingValuesError, msg
2839        else:
2840            elevations = elevations*(missing==0) + missing*NaN_filler
2841
2842    #######
2843
2844
2845
2846    number_of_times = times.shape[0]
2847    number_of_latitudes = latitudes.shape[0]
2848    number_of_longitudes = longitudes.shape[0]
2849
2850    assert amplitudes.shape[0] == number_of_times
2851    assert amplitudes.shape[1] == number_of_latitudes
2852    assert amplitudes.shape[2] == number_of_longitudes
2853
2854    if verbose:
2855        print '------------------------------------------------'
2856        print 'Statistics:'
2857        print '  Extent (lat/lon):'
2858        print '    lat in [%f, %f], len(lat) == %d'\
2859              %(min(latitudes.flat), max(latitudes.flat),
2860                len(latitudes.flat))
2861        print '    lon in [%f, %f], len(lon) == %d'\
2862              %(min(longitudes.flat), max(longitudes.flat),
2863                len(longitudes.flat))
2864        print '    t in [%f, %f], len(t) == %d'\
2865              %(min(times.flat), max(times.flat), len(times.flat))
2866
2867        q = amplitudes.flat
2868        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2870
2871        q = uspeed.flat
2872        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2874
2875        q = vspeed.flat
2876        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2878
2879        q = elevations.flat
2880        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2882
2883
[4704]2884    # print number_of_latitudes, number_of_longitudes
[2852]2885    number_of_points = number_of_latitudes*number_of_longitudes
2886    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2887
2888
2889    file_h.close()
2890    file_u.close()
2891    file_v.close()
2892    file_e.close()
2893
2894
2895    # NetCDF file definition
2896    outfile = NetCDFFile(swwname, 'w')
2897
[4387]2898    description = 'Converted from Ferret files: %s, %s, %s, %s'\
2899                  %(basename_in + '_ha.nc',
2900                    basename_in + '_ua.nc',
2901                    basename_in + '_va.nc',
2902                    basename_in + '_e.nc')
[4388]2903   
[4704]2904    # Create new file
[4416]2905    starttime = times[0]
[4862]2906   
[4455]2907    sww = Write_sww()
[4704]2908    sww.store_header(outfile, times, number_of_volumes,
2909                     number_of_points, description=description,
[4862]2910                     verbose=verbose,sww_precision=Float)
[2852]2911
[4704]2912    # Store
[3514]2913    from anuga.coordinate_transforms.redfearn import redfearn
[2852]2914    x = zeros(number_of_points, Float)  #Easting
2915    y = zeros(number_of_points, Float)  #Northing
2916
2917
2918    if verbose: print 'Making triangular grid'
[4704]2919
2920    # Check zone boundaries
[2852]2921    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2922
2923    vertices = {}
2924    i = 0
2925    for k, lat in enumerate(latitudes):       #Y direction
2926        for l, lon in enumerate(longitudes):  #X direction
2927
2928            vertices[l,k] = i
2929
2930            zone, easting, northing = redfearn(lat,lon)
2931
            msg = 'Zone boundary crossed at longitude = %f' %lon
2933            #assert zone == refzone, msg
2934            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2935            x[i] = easting
2936            y[i] = northing
2937            i += 1
2938
2939    #Construct 2 triangles per 'rectangular' element
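    # Each 'rectangular' cell with corners vertices[l,k], vertices[l+1,k],
    # vertices[l,k+1] and vertices[l+1,k+1] is split along its diagonal into
    # an upper and a lower triangle, so adjacent cells share edges and the
    # full grid is covered.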
2940    volumes = []
2941    for l in range(number_of_longitudes-1):    #X direction
2942        for k in range(number_of_latitudes-1): #Y direction
2943            v1 = vertices[l,k+1]
2944            v2 = vertices[l,k]
2945            v3 = vertices[l+1,k+1]
2946            v4 = vertices[l+1,k]
2947
2948            volumes.append([v1,v2,v3]) #Upper element
2949            volumes.append([v4,v3,v2]) #Lower element
2950
2951    volumes = array(volumes)
2952
[4387]2953    if origin is None:
2954        origin = Geo_reference(refzone,min(x),min(y))
2955    geo_ref = write_NetCDF_georeference(origin, outfile)
2956   
[2852]2957    if elevation is not None:
2958        z = elevation
2959    else:
2960        if inverted_bathymetry:
2961            z = -1*elevations
2962        else:
2963            z = elevations
2964    #FIXME: z should be obtained from MOST and passed in here
2965
[4862]2966    #FIXME use the Write_sww instance(sww) to write this info
[2852]2967    from Numeric import resize
2968    z = resize(z,outfile.variables['z'][:].shape)
[4387]2969    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
2970    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
[3954]2971    outfile.variables['z'][:] = z             #FIXME HACK for bacwards compat.
[2852]2972    outfile.variables['elevation'][:] = z
[3954]2973    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
[2852]2974
2975
2976
2977    #Time stepping
2978    stage = outfile.variables['stage']
2979    xmomentum = outfile.variables['xmomentum']
2980    ymomentum = outfile.variables['ymomentum']
2981
2982    if verbose: print 'Converting quantities'
2983    n = len(times)
2984    for j in range(n):
2985        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2986        i = 0
2987        for k in range(number_of_latitudes):      #Y direction
2988            for l in range(number_of_longitudes): #X direction
2989                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2990                stage[j,i] = w
2991                h = w - z[i]
2992                xmomentum[j,i] = uspeed[j,k,l]/100*h
2993                ymomentum[j,i] = vspeed[j,k,l]/100*h
2994                i += 1
2995
2996    #outfile.close()
2997
2998    #FIXME: Refactor using code from file_function.statistics
2999    #Something like print swwstats(swwname)
3000    if verbose:
3001        x = outfile.variables['x'][:]
3002        y = outfile.variables['y'][:]
3003        print '------------------------------------------------'
3004        print 'Statistics of output file:'
3005        print '  Name: %s' %swwname
3006        print '  Reference:'
3007        print '    Lower left corner: [%f, %f]'\
[4387]3008              %(geo_ref.get_xllcorner(), geo_ref.get_yllcorner())
        print '    Start time: %f' %starttime
[4418]3010        print '    Min time: %f' %mint
3011        print '    Max time: %f' %maxt
[2852]3012        print '  Extent:'
3013        print '    x [m] in [%f, %f], len(x) == %d'\
3014              %(min(x.flat), max(x.flat), len(x.flat))
3015        print '    y [m] in [%f, %f], len(y) == %d'\
3016              %(min(y.flat), max(y.flat), len(y.flat))
3017        print '    t [s] in [%f, %f], len(t) == %d'\
3018              %(min(times), max(times), len(times))
3019        print '  Quantities [SI units]:'
3020        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
3021            q = outfile.variables[name][:].flat
3022            print '    %s in [%f, %f]' %(name, min(q), max(q))
3023
3024
3025
3026    outfile.close()
3027
3028
3029
3030
3031
[4303]3032def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False):
[2852]3033    """Template for converting typical text files with time series to
3034    NetCDF tms file.
3035
3036
3037    The file format is assumed to be either two fields separated by a comma:
3038
3039        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3040
3041    E.g
3042
3043      31/08/04 00:00:00, 1.328223 0 0
3044      31/08/04 00:15:00, 1.292912 0 0
3045
[4303]3046    or time (seconds), value0 value1 value2 ...
3047   
3048      0.0, 1.328223 0 0
3049      0.1, 1.292912 0 0
3050
[2852]3051    will provide a time dependent function f(t) with three attributes
3052
    filename is assumed to be the rootname with extensions .txt and .tms
3054    """
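    # Usage sketch (hypothetical file names): assuming 'tide.txt' contains
    # lines like '31/08/04 00:00:00, 1.328223 0 0', the call below would write
    # 'tide.tms' with the three columns stored as the named quantities:
    #
    #     timefile2netcdf('tide',
    #                     quantity_names=['stage', 'xmomentum', 'ymomentum'])
    #
    # If the first field is already in seconds, pass time_as_seconds=True.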
3055
3056    import time, calendar
3057    from Numeric import array
[3514]3058    from anuga.config import time_format
3059    from anuga.utilities.numerical_tools import ensure_numeric
[2852]3060
3061
3062
3063    fid = open(filename + '.txt')
3064    line = fid.readline()
3065    fid.close()
3066
3067    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3069    assert len(fields) == 2, msg
3070
[4303]3071    if not time_as_seconds:
3072        try:
3073            starttime = calendar.timegm(time.strptime(fields[0], time_format))
3074        except ValueError:
3075            msg = 'First field in file %s must be' %filename
3076            msg += ' date-time with format %s.\n' %time_format
3077            msg += 'I got %s instead.' %fields[0]
3078            raise DataTimeError, msg
3079    else:
3080        try:
3081            starttime = float(fields[0])
        except ValueError:
3083            msg = "Bad time format"
3084            raise DataTimeError, msg
[2852]3085
3086
3087    #Split values
3088    values = []
3089    for value in fields[1].split():
3090        values.append(float(value))
3091
3092    q = ensure_numeric(values)
3093
3094    msg = 'ERROR: File must contain at least one independent value'
3095    assert len(q.shape) == 1, msg
3096
3097
3098
3099    #Read times proper
3100    from Numeric import zeros, Float, alltrue
[3514]3101    from anuga.config import time_format
[2852]3102    import time, calendar
3103
3104    fid = open(filename + '.txt')
3105    lines = fid.readlines()
3106    fid.close()
3107
3108    N = len(lines)
3109    d = len(q)
3110
3111    T = zeros(N, Float)       #Time
3112    Q = zeros((N, d), Float)  #Values
3113
3114    for i, line in enumerate(lines):
3115        fields = line.split(',')
[4303]3116        if not time_as_seconds:
3117            realtime = calendar.timegm(time.strptime(fields[0], time_format))
3118        else:
3119             realtime = float(fields[0])
[2852]3120        T[i] = realtime - starttime
3121
3122        for j, value in enumerate(fields[1].split()):
3123            Q[i, j] = float(value)
3124
    msg = 'File %s must list time as a monotonically ' %filename
3126    msg += 'increasing sequence'
3127    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3128
3129    #Create NetCDF file
3130    from Scientific.IO.NetCDF import NetCDFFile
3131
3132    fid = NetCDFFile(filename + '.tms', 'w')
3133
3134
3135    fid.institution = 'Geoscience Australia'
3136    fid.description = 'Time series'
3137
3138
3139    #Reference point
3140    #Start time in seconds since the epoch (midnight 1/1/1970)
3141    #FIXME: Use Georef
3142    fid.starttime = starttime
3143
3144    # dimension definitions
3145    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3146    #fid.createDimension('number_of_vertices', 3)
3147
3148
3149    fid.createDimension('number_of_timesteps', len(T))
3150
3151    fid.createVariable('time', Float, ('number_of_timesteps',))
3152
3153    fid.variables['time'][:] = T
3154
3155    for i in range(Q.shape[1]):
3156        try:
3157            name = quantity_names[i]
3158        except:
3159            name = 'Attribute%d'%i
3160
3161        fid.createVariable(name, Float, ('number_of_timesteps',))
3162        fid.variables[name][:] = Q[:,i]
3163
3164    fid.close()
3165
3166
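# Usage sketch for timefile2netcdf (the file name and quantity names below
# are illustrative assumptions, not part of the module):
#
#     timefile2netcdf('gauge',
#                     quantity_names=['stage', 'xmomentum', 'ymomentum'],
#                     time_as_seconds=True)
#
# This reads 'gauge.txt' (lines such as '0.0, 1.328223 0 0') and writes
# 'gauge.tms' with one NetCDF variable per column.
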
3167def extent_sww(file_name):
3168    """
3169    Read in an sww file and return its extent.
3170
3171    Input:
3172    file_name - the sww file
3173
3174    Output:
3175    A list [min(x), max(x), min(y), max(y), min(stage), max(stage)]
3176    giving the spatial extent of the mesh and the range of stage values.
3180    """
3181
3182
3183    from Scientific.IO.NetCDF import NetCDFFile
3184
3185    #Check contents
3186    #Get NetCDF
3187    fid = NetCDFFile(file_name, 'r')
3188
3189    # Get the variables
3190    x = fid.variables['x'][:]
3191    y = fid.variables['y'][:]
3192    stage = fid.variables['stage'][:]
3193    #print "stage",stage
3194    #print "stage.shap",stage.shape
3195    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3196    #print "min(stage)",min(stage)
3197
3198    fid.close()
3199
3200    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3201
3202
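# Usage sketch for extent_sww (hypothetical file name):
#
#     xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('model.sww')
#     print 'x in [%f, %f], stage in [%f, %f]' %(xmin, xmax, stage_min, stage_max)
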
[5276]3203def sww2domain(filename, boundary=None, t=None,
3204               fail_if_NaN=True ,NaN_filler=0,
3205               verbose = False, very_verbose = False):
[2852]3206    """
3207    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3208
3209    Boundary is not recommended if domain.smooth is not selected, as it
3210    uses unique coordinates, but not unique boundaries. This means that
3211    the boundary file will not be compatible with the coordinates, and will
3212    give a different final boundary, or crash.
3213    """
3214    NaN=9.969209968386869e+036
3215    #initialise NaN.
3216
3217    from Scientific.IO.NetCDF import NetCDFFile
3218    from shallow_water import Domain
3219    from Numeric import asarray, transpose, resize
3220
3221    if verbose: print 'Reading from ', filename
3222    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3223    time = fid.variables['time']       #Timesteps
3224    if t is None:
3225        t = time[-1]
3226    time_interp = get_time_interp(time,t)
3227
3228    # Get the variables as Numeric arrays
3229    x = fid.variables['x'][:]             #x-coordinates of vertices
3230    y = fid.variables['y'][:]             #y-coordinates of vertices
3231    elevation = fid.variables['elevation']     #Elevation
3232    stage = fid.variables['stage']     #Water level
3233    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3234    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3235
3236    starttime = fid.starttime[0]
3237    volumes = fid.variables['volumes'][:] #Connectivity
[5276]3238    coordinates = transpose(asarray([x.tolist(),y.tolist()]))
3239    #FIXME (Ole): Something like this might be better: concatenate( (x, y), axis=1 )
3240    # or concatenate( (x[:,NewAxis],x[:,NewAxis]), axis=1 )
[2852]3241
3242    conserved_quantities = []
3243    interpolated_quantities = {}
3244    other_quantities = []
3245
3246    # get geo_reference
3247    #sww files don't have to have a geo_ref
3248    try:
3249        geo_reference = Geo_reference(NetCDFObject=fid)
3250    except: #AttributeError, e:
3251        geo_reference = None
3252
3253    if verbose: print '    getting quantities'
3254    for quantity in fid.variables.keys():
3255        dimensions = fid.variables[quantity].dimensions
3256        if 'number_of_timesteps' in dimensions:
3257            conserved_quantities.append(quantity)
3258            interpolated_quantities[quantity]=\
3259                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3260        else: other_quantities.append(quantity)
3261
3262    other_quantities.remove('x')
3263    other_quantities.remove('y')
3264    other_quantities.remove('z')
3265    other_quantities.remove('volumes')
[4455]3266    try:
3267        other_quantities.remove('stage_range')
3268        other_quantities.remove('xmomentum_range')
3269        other_quantities.remove('ymomentum_range')
3270        other_quantities.remove('elevation_range')
3271    except:
3272        pass
3273       
[2852]3274
3275    conserved_quantities.remove('time')
3276
3277    if verbose: print '    building domain'
3278    #    From domain.Domain:
3279    #    domain = Domain(coordinates, volumes,\
3280    #                    conserved_quantities = conserved_quantities,\
3281    #                    other_quantities = other_quantities,zone=zone,\
3282    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3283
3284    #   From shallow_water.Domain:
3285    coordinates=coordinates.tolist()
3286    volumes=volumes.tolist()
3287    #FIXME:should this be in mesh?(peter row)
3288    if fid.smoothing == 'Yes': unique = False
3289    else: unique = True
3290    if unique:
3291        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3292
3293
3294    try:
3295        domain = Domain(coordinates, volumes, boundary)
3296    except AssertionError, e:
3297        fid.close()
3298        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3299        raise DataDomainError, msg
3300
3301    if not boundary is None:
3302        domain.boundary = boundary
3303
3304    domain.geo_reference = geo_reference
3305
3306    domain.starttime=float(starttime)+float(t)
3307    domain.time=0.0
3308
3309    for quantity in other_quantities:
3310        try:
3311            NaN = fid.variables[quantity].missing_value
3312        except:
3313            pass #quantity has no missing_value number
3314        X = fid.variables[quantity][:]
3315        if very_verbose:
3316            print '       ',quantity
3317            print '        NaN =',NaN
3318            print '        max(X)'
3319            print '       ',max(X)
3320            print '        max(X)==NaN'
3321            print '       ',max(X)==NaN
3322            print ''
3323        if (max(X)==NaN) or (min(X)==NaN):
3324            if fail_if_NaN:
3325                msg = 'quantity "%s" contains no_data entry'%quantity
3326                raise DataMissingValuesError, msg
3327            else:
3328                data = (X != NaN)
3329                X = (X*data)+(data==0)*NaN_filler
3330        if unique:
3331            X = resize(X,(len(X)/3,3))
3332        domain.set_quantity(quantity,X)
3333    #
3334    for quantity in conserved_quantities:
3335        try:
3336            NaN = fid.variables[quantity].missing_value
3337        except:
3338            pass #quantity has no missing_value number
3339        X = interpolated_quantities[quantity]
3340        if very_verbose:
3341            print '       ',quantity
3342            print '        NaN =',NaN
3343            print '        max(X)'
3344            print '       ',max(X)
3345            print '        max(X)==NaN'
3346            print '       ',max(X)==NaN
3347            print ''
3348        if (max(X)==NaN) or (min(X)==NaN):
3349            if fail_if_NaN:
3350                msg = 'quantity "%s" contains no_data entry'%quantity
3351                raise DataMissingValuesError, msg
3352            else:
3353                data = (X != NaN)
3354                X = (X*data)+(data==0)*NaN_filler
3355        if unique:
3356            X = resize(X,(X.shape[0]/3,3))
3357        domain.set_quantity(quantity,X)
3358
3359    fid.close()
3360    return domain
3361
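# Usage sketch for sww2domain (hypothetical file name; t defaults to the last
# stored timestep).  Useful for restarting a run from a previous sww file:
#
#     domain = sww2domain('model.sww', fail_if_NaN=False, NaN_filler=0,
#                         verbose=True)
#     print 'Domain starttime is', domain.starttime
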
[5276]3362
[2852]3363def interpolated_quantity(saved_quantity,time_interp):
3364
3365    #given an index and ratio, interpolate quantity with respect to time.
3366    index,ratio = time_interp
3367    Q = saved_quantity
3368    if ratio > 0:
3369        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3370    else:
3371        q = Q[index]
3372    #Return vector of interpolated values
3373    return q
3374
[5276]3375
[2852]3376def get_time_interp(time,t=None):
3377    #Finds the ratio and index for time interpolation.
[3560]3378    #It is borrowed from previous abstract_2d_finite_volumes code.
[2852]3379    if t is None:
3380        t=time[-1]
3381        index = -1
3382        ratio = 0.
3383    else:
3384        T = time
3385        tau = t
3386        index=0
3387        msg = 'Time interval derived from file %s [%s:%s]'\
3388            %('FIXMEfilename', T[0], T[-1])
3389        msg += ' does not match model time: %s' %tau
3390        if tau < time[0]: raise DataTimeError, msg
3391        if tau > time[-1]: raise DataTimeError, msg
3392        while tau > time[index]: index += 1
3393        while tau < time[index]: index -= 1
3394        if tau == time[index]:
3395            #Protect against case where tau == time[-1] (last time)
3396            # - also works in general when tau == time[i]
3397            ratio = 0
3398        else:
3399            #t is now between index and index+1
3400            ratio = (tau - time[index])/(time[index+1] - time[index])
3401    return (index,ratio)
3402
3403
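# A worked example of the time interpolation above (values are illustrative):
# with stored times [0., 10., 20.] and t = 15., get_time_interp returns
# (index=1, ratio=0.5) and interpolated_quantity then computes the linear
# blend (1-0.5)*Q[1] + 0.5*Q[2] between the bracketing timesteps.
#
#     index, ratio = get_time_interp([0.0, 10.0, 20.0], t=15.0)  # -> (1, 0.5)
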
3404def weed(coordinates,volumes,boundary = None):
[4455]3405    if type(coordinates)==ArrayType:
[2852]3406        coordinates = coordinates.tolist()
[4455]3407    if type(volumes)==ArrayType:
[2852]3408        volumes = volumes.tolist()
3409
3410    unique = False
3411    point_dict = {}
3412    same_point = {}
3413    for i in range(len(coordinates)):
3414        point = tuple(coordinates[i])
3415        if point_dict.has_key(point):
3416            unique = True
3417            same_point[i]=point
3418            #to change all point i references to point j
3419        else:
3420            point_dict[point]=i
3421            same_point[i]=point
3422
3423    coordinates = []
3424    i = 0
3425    for point in point_dict.keys():
3426        point = tuple(point)
3427        coordinates.append(list(point))
3428        point_dict[point]=i
3429        i+=1
3430
3431
3432    for volume in volumes:
3433        for i in range(len(volume)):
3434            index = volume[i]
3435            if index>-1:
3436                volume[i]=point_dict[same_point[index]]
3437
3438    new_boundary = {}
3439    if not boundary is None:
3440        for segment in boundary.keys():
3441            point0 = point_dict[same_point[segment[0]]]
3442            point1 = point_dict[same_point[segment[1]]]
3443            label = boundary[segment]
3444            #FIXME should the boundary attributes be concatenated
3445            #('exterior, pond') or replaced ('pond')? (peter row)
3446
3447            if new_boundary.has_key((point0,point1)):
3448                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3449                                              #+','+label
3450
3451            elif new_boundary.has_key((point1,point0)):
3452                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3453                                              #+','+label
3454            else: new_boundary[(point0,point1)]=label
3455
3456        boundary = new_boundary
3457
3458    return coordinates,volumes,boundary
3459
3460
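# A small illustration of weed() with made-up data: the duplicated copy of
# point (0., 0.) is removed and the triangle indices are renumbered so both
# triangles refer to the single remaining copy.
#
#     coords = [[0., 0.], [1., 0.], [0., 1.], [0., 0.]]
#     tris = [[0, 1, 2], [3, 1, 2]]
#     coords, tris, bnd = weed(coords, tris)
#     # len(coords) == 3; tris[0] and tris[1] now use the same vertex indices
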
3461def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3462                 verbose=False):
3463    """Read Digitial Elevation model from the following NetCDF format (.dem)
3464
3465    Example:
3466
3467    ncols         3121
3468    nrows         1800
3469    xllcorner     722000
3470    yllcorner     5893000
3471    cellsize      25
3472    NODATA_value  -9999
3473    138.3698 137.4194 136.5062 135.5558 ..........
3474
3475    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3476    """
3477
3478    import os
3479    from Scientific.IO.NetCDF import NetCDFFile
3480    from Numeric import Float, zeros, sum, reshape, equal
3481
3482    root = basename_in
3483    inname = root + '.dem'
3484
3485    #Open existing netcdf file to read
3486    infile = NetCDFFile(inname, 'r')
3487    if verbose: print 'Reading DEM from %s' %inname
3488
3489    #Read metadata
3490    ncols = infile.ncols[0]
3491    nrows = infile.nrows[0]
3492    xllcorner = infile.xllcorner[0]
3493    yllcorner = infile.yllcorner[0]
3494    cellsize = infile.cellsize[0]
3495    NODATA_value = infile.NODATA_value[0]
3496    zone = infile.zone[0]
3497    false_easting = infile.false_easting[0]
3498    false_northing = infile.false_northing[0]
3499    projection = infile.projection
3500    datum = infile.datum
3501    units = infile.units
3502
3503    dem_elevation = infile.variables['elevation']
3504
3505    #Get output file name
3506    if basename_out == None:
3507        outname = root + '_' + repr(cellsize_new) + '.dem'
3508    else:
3509        outname = basename_out + '.dem'
3510
3511    if verbose: print 'Write decimated NetCDF file to %s' %outname
3512
3513    #Determine some dimensions for decimated grid
3514    (nrows_stencil, ncols_stencil) = stencil.shape
3515    x_offset = ncols_stencil / 2
3516    y_offset = nrows_stencil / 2
3517    cellsize_ratio = int(cellsize_new / cellsize)
3518    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3519    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3520
3521    #Open netcdf file for output
3522    outfile = NetCDFFile(outname, 'w')
3523
3524    #Create new file
3525    outfile.institution = 'Geoscience Australia'
3526    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3527                           'of spatial point data'
3528    #Georeferencing
3529    outfile.zone = zone
3530    outfile.projection = projection
3531    outfile.datum = datum
3532    outfile.units = units
3533
3534    outfile.cellsize = cellsize_new
3535    outfile.NODATA_value = NODATA_value
3536    outfile.false_easting = false_easting
3537    outfile.false_northing = false_northing
3538
3539    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3540    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3541    outfile.ncols = ncols_new
3542    outfile.nrows = nrows_new
3543
3544    # dimension definition
3545    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3546
3547    # variable definition
3548    outfile.createVariable('elevation', Float, ('number_of_points',))
3549
3550    # Get handle to the variable
3551    elevation = outfile.variables['elevation']
3552
3553    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3554
3555    #Store data
3556    global_index = 0
3557    for i in range(nrows_new):
3558        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3559        lower_index = global_index
3560        telev =  zeros(ncols_new, Float)
3561        local_index = 0
3562        trow = i * cellsize_ratio
3563
3564        for j in range(ncols_new):
3565            tcol = j * cellsize_ratio
3566            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3567
3568            #if dem contains 1 or more NODATA_values set value in
3569            #decimated dem to NODATA_value, else compute decimated
3570            #value using stencil
3571            if sum(sum(equal(tmp, NODATA_value))) > 0:
3572                telev[local_index] = NODATA_value
3573            else:
3574                telev[local_index] = sum(sum(tmp * stencil))
3575
3576            global_index += 1
3577            local_index += 1
3578
3579        upper_index = global_index
3580
3581        elevation[lower_index:upper_index] = telev
3582
3583    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3584
3585    infile.close()
3586    outfile.close()
3587
3588
3589
3590
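# Usage sketch for decimate_dem (hypothetical file names).  A normalised 3x3
# averaging stencil decimates a 25 m DEM to 75 m cells; any window containing
# NODATA is written out as NODATA:
#
#     from Numeric import ones, Float
#     stencil = ones((3, 3), Float) / 9.0
#     decimate_dem('topo25m', stencil, 75, basename_out='topo75m', verbose=True)
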
[4868]3591def tsh2sww(filename, verbose=False): 
[2852]3592    """
3593    Convert a tsh/msh file to an sww file, e.g. to check that the mesh 'looks' good.
3594    """
3595
3596
3597    if verbose: print 'Creating domain from', filename
3598    domain = pmesh_to_domain_instance(filename, Domain)
3599    if verbose: print "Number of triangles = ", len(domain)
3600
3601    domain.smooth = True
3602    domain.format = 'sww'   #Native netcdf visualisation format
3603    file_path, filename = path.split(filename)
3604    filename, ext = path.splitext(filename)
[3846]3605    domain.set_name(filename)   
[2852]3606    domain.reduction = mean
3607    if verbose == True:print "file_path",file_path
3608    if file_path == "":file_path = "."
3609    domain.set_datadir(file_path)
3610
3611    if verbose == True:
3612        print "Output written to " + domain.get_datadir() + sep + \
[3846]3613              domain.get_name() + "." + domain.format
[2852]3614    sww = get_dataobject(domain)
3615    sww.store_connectivity()
[4868]3616    sww.store_timestep()
[2852]3617
3618
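# Usage sketch for tsh2sww (hypothetical mesh file):
#
#     tsh2sww('mesh.tsh', verbose=True)
#
# writes 'mesh.sww' alongside the input mesh so it can be inspected in a viewer.
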
3619def asc_csiro2sww(bath_dir,
3620                  elevation_dir,
3621                  ucur_dir,
3622                  vcur_dir,
3623                  sww_file,
3624                  minlat = None, maxlat = None,
3625                  minlon = None, maxlon = None,
3626                  zscale=1,
3627                  mean_stage = 0,
3628                  fail_on_NaN = True,
3629                  elevation_NaN_filler = 0,
3630                  bath_prefix='ba',
3631                  elevation_prefix='el',
3632                  verbose=False):
3633    """
3634    Produce an sww boundary file, from esri ascii data from CSIRO.
3635
3636    Also convert latitude and longitude to UTM. All coordinates are
3637    assumed to be given in the GDA94 datum.
3638
3639    assume:
3640    All files are in esri ascii format
3641
3642    4 types of information
3643    bathymetry
3644    elevation
3645    u velocity
3646    v velocity
3647
3648    Assumptions
3649    The metadata of all the files is the same
3650    Each type is in a separate directory
3651    One bath file with extension .000
3652    The time period is less than 24hrs and uniform.
3653    """
3654    from Scientific.IO.NetCDF import NetCDFFile
3655
[3514]3656    from anuga.coordinate_transforms.redfearn import redfearn
[2852]3657
3658    precision = Float # So if we want to change the precision, it is done here
3659
3660    # go in to the bath dir and load the only file,
3661    bath_files = os.listdir(bath_dir)
3662
3663    bath_file = bath_files[0]
3664    bath_dir_file =  bath_dir + os.sep + bath_file
3665    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3666
3667    #Use the date.time of the bath file as a basis for
3668    #the start time for other files
3669    base_start = bath_file[-12:]
3670
3671    #go into the elevation dir and load the 000 file
3672    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3673                         + base_start
3674
3675    elevation_files = os.listdir(elevation_dir)
3676    ucur_files = os.listdir(ucur_dir)
3677    vcur_files = os.listdir(vcur_dir)
[4031]3678    elevation_files.sort()
[2852]3679    # the first elevation file should be the
3680    # file with the same base name as the bath data
3681    assert elevation_files[0] == 'el' + base_start
3682
3683    number_of_latitudes = bath_grid.shape[0]
3684    number_of_longitudes = bath_grid.shape[1]
3685    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3686
3687    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3688                  for x in range(number_of_longitudes)]
3689    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3690                 for y in range(number_of_latitudes)]
3691
3692    # reverse order of lat, so the first lat represents the first grid row
3693    latitudes.reverse()
3694
[4027]3695    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
[2852]3696                                                 minlat=minlat, maxlat=maxlat,
3697                                                 minlon=minlon, maxlon=maxlon)
3698
3699
3700    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3701    latitudes = latitudes[kmin:kmax]
3702    longitudes = longitudes[lmin:lmax]
3703    number_of_latitudes = len(latitudes)
3704    number_of_longitudes = len(longitudes)
3705    number_of_times = len(os.listdir(elevation_dir))
3706    number_of_points = number_of_latitudes*number_of_longitudes
3707    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3708
3709    #Work out the times
3710    if len(elevation_files) > 1:
3711        # Assume: The time period is less than 24hrs.
3712        time_period = (int(elevation_files[1][-3:]) - \
3713                      int(elevation_files[0][-3:]))*60*60
3714        times = [x*time_period for x in range(len(elevation_files))]
3715    else:
3716        times = [0.0]
3717
3718
3719    if verbose:
3720        print '------------------------------------------------'
3721        print 'Statistics:'
3722        print '  Extent (lat/lon):'
3723        print '    lat in [%f, %f], len(lat) == %d'\
3724              %(min(latitudes), max(latitudes),
3725                len(latitudes))
3726        print '    lon in [%f, %f], len(lon) == %d'\
3727              %(min(longitudes), max(longitudes),
3728                len(longitudes))
3729        print '    t in [%f, %f], len(t) == %d'\
3730              %(min(times), max(times), len(times))
3731
3732    ######### WRITE THE SWW FILE #############
3733    # NetCDF file definition
3734    outfile = NetCDFFile(sww_file, 'w')
3735
3736    #Create new file
3737    outfile.institution = 'Geoscience Australia'
3738    outfile.description = 'Converted from XXX'
3739
3740
3741    #For sww compatibility
3742    outfile.smoothing = 'Yes'
3743    outfile.order = 1
3744
3745    #Start time in seconds since the epoch (midnight 1/1/1970)
3746    outfile.starttime = starttime = times[0]
3747
3748
3749    # dimension definitions
3750    outfile.createDimension('number_of_volumes', number_of_volumes)
3751
3752    outfile.createDimension('number_of_vertices', 3)
3753    outfile.createDimension('number_of_points', number_of_points)
3754    outfile.createDimension('number_of_timesteps', number_of_times)
3755
3756    # variable definitions
3757    outfile.createVariable('x', precision, ('number_of_points',))
3758    outfile.createVariable('y', precision, ('number_of_points',))
3759    outfile.createVariable('elevation', precision, ('number_of_points',))
3760
3761    #FIXME: Backwards compatibility
3762    outfile.createVariable('z', precision, ('number_of_points',))
3763    #################################
3764
3765    outfile.createVariable('volumes', Int, ('number_of_volumes',
3766                                            'number_of_vertices'))
3767
3768    outfile.createVariable('time', precision,
3769                           ('number_of_timesteps',))
3770
3771    outfile.createVariable('stage', precision,
3772                           ('number_of_timesteps',
3773                            'number_of_points'))
3774
3775    outfile.createVariable('xmomentum', precision,
3776                           ('number_of_timesteps',
3777                            'number_of_points'))
3778
3779    outfile.createVariable('ymomentum', precision,
3780                           ('number_of_timesteps',
3781                            'number_of_points'))
3782
3783    #Store
[3514]3784    from anuga.coordinate_transforms.redfearn import redfearn
[2852]3785    x = zeros(number_of_points, Float)  #Easting
3786    y = zeros(number_of_points, Float)  #Northing
3787
3788    if verbose: print 'Making triangular grid'
3789    #Get zone of 1st point.
3790    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3791
3792    vertices = {}
3793    i = 0
3794    for k, lat in enumerate(latitudes):
3795        for l, lon in enumerate(longitudes):
3796
3797            vertices[l,k] = i
3798
3799            zone, easting, northing = redfearn(lat,lon)
3800
3801            msg = 'Zone boundary crossed at longitude = %f' %lon
3802            #assert zone == refzone, msg
3803            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3804            x[i] = easting
3805            y[i] = northing
3806            i += 1
3807
3808
3809    #Construct 2 triangles per 'rectangular' element
3810    volumes = []
3811    for l in range(number_of_longitudes-1):    #X direction
3812        for k in range(number_of_latitudes-1): #Y direction
3813            v1 = vertices[l,k+1]
3814            v2 = vertices[l,k]
3815            v3 = vertices[l+1,k+1]
3816            v4 = vertices[l+1,k]
3817
3818            #Note, this is different to the ferret2sww code
3819            #since the order of the lats is reversed.
3820            volumes.append([v1,v3,v2]) #Upper element
3821            volumes.append([v4,v2,v3]) #Lower element
3822
3823    volumes = array(volumes)
3824
3825    geo_ref = Geo_reference(refzone,min(x),min(y))
3826    geo_ref.write_NetCDF(outfile)
3827
3828    # This will put the geo ref in the middle
3829    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3830
3831
3832    if verbose:
3833        print '------------------------------------------------'
3834        print 'More Statistics:'
3835        print '  Extent (x/y) [m]:'
3836        print '    x in [%f, %f], len(x) == %d'\
3837              %(min(x), max(x),
3838                len(x))
3839        print '    y in [%f, %f], len(y) == %d'\
3840              %(min(y), max(y),
3841                len(y))
3842        print 'geo_ref: ',geo_ref
3843
3844    z = resize(bath_grid,outfile.variables['z'][:].shape)
3845    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3846    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3847    outfile.variables['z'][:] = z
3848    outfile.variables['elevation'][:] = z  #FIXME HACK
3849    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3850
3851    stage = outfile.variables['stage']
3852    xmomentum = outfile.variables['xmomentum']
3853    ymomentum = outfile.variables['ymomentum']
3854
3855    outfile.variables['time'][:] = times   #Store time relative
3856
3857    if verbose: print 'Converting quantities'
3858    n = number_of_times
3859    for j in range(number_of_times):
3860        # load in files
3861        elevation_meta, elevation_grid = \
3862           _read_asc(elevation_dir + os.sep + elevation_files[j])
3863
3864        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3865        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3866
3867        #cut matrix to desired size
3868        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3869        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3870        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
[4271]3871       
[2852]3872        # handle missing values
3873        missing = (elevation_grid == elevation_meta['NODATA_value'])
3874        if sometrue (missing):
3875            if fail_on_NaN:
3876                msg = 'File %s contains missing values'\
3877                      %(elevation_files[j])
3878                raise DataMissingValuesError, msg
3879            else:
3880                elevation_grid = elevation_grid*(missing==0) + \
3881                                 missing*elevation_NaN_filler
3882
3883
3884        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3885        i = 0
3886        for k in range(number_of_latitudes):      #Y direction
3887            for l in range(number_of_longitudes): #X direction
3888                w = zscale*elevation_grid[k,l] + mean_stage
3889                stage[j,i] = w
3890                h = w - z[i]
3891                xmomentum[j,i] = u_momentum_grid[k,l]*h
3892                ymomentum[j,i] = v_momentum_grid[k,l]*h
3893                i += 1
3894    outfile.close()
3895
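# Usage sketch for asc_csiro2sww (all directory and file names below are
# illustrative assumptions; the directories are expected to hold the esri
# ascii grids described in the docstring above):
#
#     asc_csiro2sww('bathys', 'elevations', 'ucur', 'vcur', 'boundary.sww',
#                   minlat=-34.5, maxlat=-34.0, minlon=150.0, maxlon=150.5,
#                   mean_stage=0.0, zscale=1, verbose=True)
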
[4037]3896def _get_min_max_indexes(latitudes_ref,longitudes_ref,
[2852]3897                        minlat=None, maxlat=None,
3898                        minlon=None, maxlon=None):
3899    """
3900    Return min, max indexes (for slicing) of the lat and long arrays to cover
3901    the area specified with min/max lat/long.
3902
3903    Think of the latitudes and longitudes describing a 2d surface.
3904    The area returned is, if possible, just big enough to cover the
3905    input max/min area. (This will not be possible if the max/min area
3906    has a section outside of the latitudes/longitudes area.)
3907
[4037]3908    assert longitudes are sorted,
[2852]3909    long - from low to high (west to east, eg 148 - 151)
[4037]3910    assert latitudes are sorted, ascending or descending
[2852]3911    """
[4037]3912    latitudes = latitudes_ref[:]
3913    longitudes = longitudes_ref[:]
[2852]3914
[4037]3915    latitudes = ensure_numeric(latitudes)
3916    longitudes = ensure_numeric(longitudes)
[5347]3917
[4037]3918    assert allclose(sort(longitudes), longitudes)
[5347]3919
[5352]3920    #print latitudes[0],longitudes[0]
3921    #print len(latitudes),len(longitudes)
3922    #print latitudes[len(latitudes)-1],longitudes[len(longitudes)-1]
[4024]3923   
[4037]3924    lat_ascending = True
3925    if not allclose(sort(latitudes), latitudes):
3926        lat_ascending = False
3927        # reverse order of lat, so it's in ascending order         
3928        latitudes = latitudes[::-1]
3929        assert allclose(sort(latitudes), latitudes)
3930    #print "latitudes  in funct", latitudes
3931   
[2852]3932    largest_lat_index = len(latitudes)-1
3933    #Cut out a smaller extent.
3934    if minlat == None:
3935        lat_min_index = 0
3936    else:
3937        lat_min_index = searchsorted(latitudes, minlat)-1
3938        if lat_min_index <0:
3939            lat_min_index = 0
3940
3941
3942    if maxlat == None:
3943        lat_max_index = largest_lat_index #len(latitudes)
3944    else:
3945        lat_max_index = searchsorted(latitudes, maxlat)
3946        if lat_max_index > largest_lat_index:
3947            lat_max_index = largest_lat_index
3948
3949    if minlon == None:
3950        lon_min_index = 0
3951    else:
3952        lon_min_index = searchsorted(longitudes, minlon)-1
3953        if lon_min_index <0:
3954            lon_min_index = 0
3955
3956    if maxlon == None:
3957        lon_max_index = len(longitudes)
3958    else:
3959        lon_max_index = searchsorted(longitudes, maxlon)
3960
[4037]3961    # Reversing the indexes, if the lat array is descending
3962    if lat_ascending is False:
3963        lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3964                                       largest_lat_index - lat_min_index
[2852]3965    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3966    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3967
3968    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3969
3970
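# A worked example of _get_min_max_indexes (made-up values): for latitudes
# [-36, -35, -34, -33] and longitudes [148, 149, 150, 151], a request of
# minlat=-34.5, maxlat=-33.5, minlon=148.5, maxlon=150.5 gives slicing
# indexes that just cover the requested area:
#
#     kmin, kmax, lmin, lmax = _get_min_max_indexes(
#         [-36, -35, -34, -33], [148, 149, 150, 151],
#         minlat=-34.5, maxlat=-33.5, minlon=148.5, maxlon=150.5)
#     # latitudes[kmin:kmax]  -> [-35, -34, -33]
#     # longitudes[lmin:lmax] -> [148, 149, 150, 151]
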
3971def _read_asc(filename, verbose=False):
3972    """Read esri file from the following ASCII format (.asc)
3973
3974    Example:
3975
3976    ncols         3121
3977    nrows         1800
3978    xllcorner     722000
3979    yllcorner     5893000
3980    cellsize      25
3981    NODATA_value  -9999
3982    138.3698 137.4194 136.5062 135.5558 ..........
3983
3984    """
3985
3986    datafile = open(filename)
3987
3988    if verbose: print 'Reading DEM from %s' %(filename)
3989    lines = datafile.readlines()
3990    datafile.close()
3991
3992    if verbose: print 'Got', len(lines), ' lines'
3993
3994    ncols = int(lines.pop(0).split()[1].strip())
3995    nrows = int(lines.pop(0).split()[1].strip())
3996    xllcorner = float(lines.pop(0).split()[1].strip())
3997    yllcorner = float(lines.pop(0).split()[1].strip())
3998    cellsize = float(lines.pop(0).split()[1].strip())
3999    NODATA_value = float(lines.pop(0).split()[1].strip())
4000
4001    assert len(lines) == nrows
4002
4003    #Store data
4004    grid = []
4005
4006    n = len(lines)
4007    for i, line in enumerate(lines):
4008        cells = line.split()
4009        assert len(cells) == ncols
4010        grid.append(array([float(x) for x in cells]))
4011    grid = array(grid)
4012
4013    return {'xllcorner':xllcorner,
4014            'yllcorner':yllcorner,
4015            'cellsize':cellsize,
4016            'NODATA_value':NODATA_value}, grid
4017
[2884]4018
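# Usage sketch for _read_asc (hypothetical file name):
#
#     metadata, grid = _read_asc('dem_block.asc')
#     print metadata['cellsize'], grid.shape   # grid is an (nrows, ncols) array
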
[2852]4019
[3720]4020####  URS 2 SWW  ###
4021
4022lon_name = 'LON'
4023lat_name = 'LAT'
4024time_name = 'TIME'
4025precision = Float # So if we want to change the precision, it is done here
4026class Write_nc:
4027    """
4028    Write an nc file.
4029   
4030    Note, this should be checked to meet cdc netcdf conventions for gridded
4031    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4032   
4033    """
4034    def __init__(self,
4035                 quantity_name,
4036                 file_name,
4037                 time_step_count,
4038                 time_step,
4039                 lon,
4040                 lat):
4041        """
4042        time_step_count is the number of time steps.
4043        time_step is the time step size
4044       
4045        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
4046        """
4047        self.quantity_name = quantity_name
[4348]4048        quantity_units = {'HA':'CENTIMETERS',
[4302]4049                              'UA':'CENTIMETERS/SECOND',
[4348]4050                              'VA':'CENTIMETERS/SECOND'}       
[3720]4051       
[4348]4052        multiplier_dic = {'HA':100.0, # To convert from m to cm
4053                              'UA':100.0,  #  m/s to cm/sec
4054                              'VA':-100.0}  # MUX files have positve x in the
4055        # Southern direction.  This corrects for it, when writing nc files.
4056       
4057        self.quantity_multiplier =  multiplier_dic[self.quantity_name]
4058       
[3720]4059        #self.file_name = file_name
4060        self.time_step_count = time_step_count
4061        self.time_step = time_step
4062
4063        # NetCDF file definition
4064        self.outfile = NetCDFFile(file_name, 'w')
4065        outfile = self.outfile       
4066
4067        #Create new file
4068        nc_lon_lat_header(outfile, lon, lat)
4069   
4070        # TIME
4071        outfile.createDimension(time_name, None)
4072        outfile.createVariable(time_name, precision, (time_name,))
4073
4074        #QUANTITY
4075        outfile.createVariable(self.quantity_name, precision,
4076                               (time_name, lat_name, lon_name))
4077        outfile.variables[self.quantity_name].missing_value=-1.e+034
4078        outfile.variables[self.quantity_name].units= \
4079                                 quantity_units[self.quantity_name]
4080        outfile.variables[lon_name][:]= ensure_numeric(lon)
4081        outfile.variables[lat_name][:]= ensure_numeric(lat)
4082
4083        #Assume no one will be wanting to read this, while we are writing
4084        #outfile.close()
4085       
4086    def store_timestep(self, quantity_slice):
4087        """
[4348]4088        Write a time slice of quantity info
[3720]4089        quantity_slice is the data to be stored at this time step
4090        """
4091       
4092        outfile = self.outfile
4093       
4094        # Get the variables
4095        time = outfile.variables[time_name]
4096        quantity = outfile.variables[self.quantity_name]
4097           
4098        i = len(time)
4099
4100        #Store time
4101        time[i] = i*self.time_step #self.domain.time
[4348]4102        quantity[i,:] = quantity_slice* self.quantity_multiplier
[3720]4103       
4104    def close(self):
4105        self.outfile.close()
4106
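# Usage sketch for Write_nc (illustrative values only): write two timesteps of
# wave height ('HA') on a 2 x 3 lat/long grid.  Values are given in metres and
# are converted to centimetres by the quantity multiplier.
#
#     from Numeric import zeros, Float
#     lon = [150.0, 150.1, 150.2]
#     lat = [-34.1, -34.0]
#     w = Write_nc('HA', 'ha_example.nc', time_step_count=2, time_step=60.0,
#                  lon=lon, lat=lat)
#     w.store_timestep(zeros((len(lat), len(lon)), Float))   # t = 0 s
#     w.store_timestep(zeros((len(lat), len(lon)), Float))   # t = 60 s
#     w.close()
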
4107def urs2sww(basename_in='o', basename_out=None, verbose=False,
4108            remove_nc_files=True,
4109            minlat=None, maxlat=None,
4110            minlon= None, maxlon=None,
4111            mint=None, maxt=None,
4112            mean_stage=0,
[3930]4113            origin = None,
[3720]4114            zscale=1,
4115            fail_on_NaN=True,
4116            NaN_filler=0,
[4380]4117            elevation=None):
[3720]4118    """
4119    Convert URS C binary format for wave propagation to
4120    sww format native to abstract_2d_finite_volumes.
4121
4122    Specify only basename_in and read files of the form
[4378]4123    basefilename_waveheight-z-mux, basefilename_velocity-e-mux and
4124    basefilename_velocity-n-mux containing relative height,
4125    x-velocity and y-velocity, respectively.
[3720]4126
4127    Also convert latitude and longitude to UTM. All coordinates are
4128    assumed to be given in the GDA94 datum. The latitude and longitude
4129    information is for  a grid.
4130
4131    min's and max's: If omitted - full extent is used.
4132    To include a value min may equal it, while max must exceed it.
[3930]4133    Lat and lon are assumed to be in decimal degrees.
[4014]4134    NOTE: minlon is the most east boundary.
[3964]4135   
4136    origin is a 3-tuple with geo referenced
4137    UTM coordinates (zone, easting, northing)
4138    It will be the origin of the sww file. This shouldn't be used,
4139    since all of anuga should be able to handle an arbitrary origin.
[3720]4140
[3964]4141
[3720]4142    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
4143    which means that latitude is the fastest
4144    varying dimension (row major order, so to speak)
4145
4146    In URS C binary the latitudes and longitudes are in ascending order.
4147    """
4148    if basename_out == None:
4149        basename_out = basename_in
4150    files_out = urs2nc(basename_in, basename_out)
4151    ferret2sww(basename_out,
4152               minlat=minlat,
4153               maxlat=maxlat,
4154               minlon=minlon,
[4014]4155               maxlon=maxlon,
[3720]4156               mint=mint,
4157               maxt=maxt,
4158               mean_stage=mean_stage,
[3930]4159               origin=origin,
[3720]4160               zscale=zscale,
4161               fail_on_NaN=fail_on_NaN,
4162               NaN_filler=NaN_filler,
4163               inverted_bathymetry=True,
4164               verbose=verbose)
4165    #print "files_out",files_out
4166    if remove_nc_files:
4167        for file_out in files_out:
4168            os.remove(file_out)
4169   
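# Usage sketch for urs2sww (hypothetical base name; the three mux files
# described in the docstring are expected alongside it):
#
#     urs2sww('o_event', basename_out='event_boundary',
#             minlat=-35.0, maxlat=-34.0, minlon=150.0, maxlon=151.0,
#             mean_stage=0.0, verbose=True)
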
4170def urs2nc(basename_in = 'o', basename_out = 'urs'):
[3964]4171    """
4172    Convert the 3 urs files to 4 nc files.
4173
4174    The urs file names must be:
[4378]4175    [basename_in]_waveheight-z-mux
4176    [basename_in]_velocity-e-mux
4177    [basename_in]_velocity-n-mux
[3964]4178   
4179    """
[4378]4180   
4181    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4182                basename_in + EAST_VELOCITY_LABEL,
4183                basename_in + NORTH_VELOCITY_LABEL]
[3720]4184    files_out = [basename_out+'_ha.nc',
4185                 basename_out+'_ua.nc',
4186                 basename_out+'_va.nc']
4187    quantities = ['HA','UA','VA']
4188
[5104]4189    #if os.access(files_in[0]+'.mux', os.F_OK) == 0 :
4190    for i, file_name in enumerate(files_in):
4191        if os.access(file_name, os.F_OK) == 0:
4192            if os.access(file_name+'.mux', os.F_OK) == 0 :
4193                msg = 'File %s does not exist or is not accessible' %file_name
4194                raise IOError, msg
4195            else:
4196               files_in[i] += '.mux'
4197               print "file_name", file_name
[3720]4198    hashed_elevation = None
4199    for file_in, file_out, quantity in map(None, files_in,
4200                                           files_out,
4201                                           quantities):
[3820]4202        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
[3720]4203                                         file_out,
4204                                         quantity)
[3975]4205        #print "lonlatdep", lonlatdep
[3720]4206        if hashed_elevation == None:
4207            elevation_file = basename_out+'_e.nc'
[3964]4208            write_elevation_nc(elevation_file,
[3720]4209                                lon,
4210                                lat,
[3820]4211                                depth)
[3720]4212            hashed_elevation = myhash(lonlatdep)
4213        else:
4214            msg = "The elevation information in the mux files is inconsistent"
4215            assert hashed_elevation == myhash(lonlatdep), msg
4216    files_out.append(elevation_file)
4217    return files_out
4218   
4219def _binary_c2nc(file_in, file_out, quantity):
4220    """
[3964]4221    Reads in a quantity urs file and writes a quantity nc file.
4222    additionally, returns the depth and lat, long info,
4223    so it can be written to a file.
[3720]4224    """
4225    columns = 3 # long, lat , depth
[3964]4226    mux_file = open(file_in, 'rb')
[3720]4227   
4228    # Number of points/stations
[3964]4229    (points_num,)= unpack('i',mux_file.read(4))
[3720]4230
4231    # nt, int - Number of time steps
[3964]4232    (time_step_count,)= unpack('i',mux_file.read(4))
[3720]4233
4234    #dt, float - time step, seconds
[3964]4235    (time_step,) = unpack('f', mux_file.read(4))
[3720]4236   
4237    msg = "Bad data in the mux file."
4238    if points_num < 0:
[3964]4239        mux_file.close()
[3720]4240        raise ANUGAError, msg
4241    if time_step_count < 0:
[3964]4242        mux_file.close()
[3720]4243        raise ANUGAError, msg
4244    if time_step < 0:
[3964]4245        mux_file.close()
[3720]4246        raise ANUGAError, msg
4247   
4248    lonlatdep = p_array.array('f')
[3964]4249    lonlatdep.read(mux_file, columns * points_num)
[3720]4250    lonlatdep = array(lonlatdep, typecode=Float)   
[3826]4251    lonlatdep = reshape(lonlatdep, (points_num, columns))
[3720]4252   
[3820]4253    lon, lat, depth = lon_lat2grid(lonlatdep)
[3830]4254    lon_sorted = list(lon)
[3750]4255    lon_sorted.sort()
[3830]4256
[3964]4257    if not lon == lon_sorted:
[3750]4258        msg = "Longitudes in mux file are not in ascending order"
4259        raise IOError, msg
[3830]4260    lat_sorted = list(lat)
4261    lat_sorted.sort()
4262
[3964]4263    if not lat == lat_sorted:
[3750]4264        msg = "Latitudes in mux file are not in ascending order"
4265   
[3720]4266    nc_file = Write_nc(quantity,
4267                       file_out,
[3826]4268                       time_step_count,
4269                       time_step,
4270                       lon,
4271                       lat)
[3720]4272
4273    for i in range(time_step_count):
[3824]4274        #Read in a time slice  from mux file 
[3720]4275        hz_p_array = p_array.array('f')
[3964]4276        hz_p_array.read(mux_file, points_num)
[3720]4277        hz_p = array(hz_p_array, typecode=Float)
[3820]4278        hz_p = reshape(hz_p, (len(lon), len(lat)))
4279        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
[3824]4280
4281        #write time slice to nc file
[3720]4282        nc_file.store_timestep(hz_p)
[3964]4283    mux_file.close()
[3720]4284    nc_file.close()
4285
[3820]4286    return lonlatdep, lon, lat, depth
[3720]4287   
4288
[3964]4289def write_elevation_nc(file_out, lon, lat, depth_vector):
4290    """
4291    Write an nc elevation file.
4292    """
4293   
[3720]4294    # NetCDF file definition
4295    outfile = NetCDFFile(file_out, 'w')
4296
4297    #Create new file
4298    nc_lon_lat_header(outfile, lon, lat)
4299   
4300    # ELEVATION
4301    zname = 'ELEVATION'
4302    outfile.createVariable(zname, precision, (lat_name, lon_name))
4303    outfile.variables[zname].units='CENTIMETERS'
4304    outfile.variables[zname].missing_value=-1.e+034
4305
4306    outfile.variables[lon_name][:]= ensure_numeric(lon)
4307    outfile.variables[lat_name][:]= ensure_numeric(lat)
4308
4309    depth = reshape(depth_vector, ( len(lat), len(lon)))
4310    outfile.variables[zname][:]= depth
4311   
4312    outfile.close()
4313   
4314def nc_lon_lat_header(outfile, lon, lat):
[3964]4315    """
4316    outfile is the netcdf file handle.
4317    lon - a list/array of the longitudes
4318    lat - a list/array of the latitudes
4319    """
[3720]4320   
4321    outfile.institution = 'Geoscience Australia'
4322    outfile.description = 'Converted from URS binary C'
4323   
4324    # Longitude
4325    outfile.createDimension(lon_name, len(lon))
4326    outfile.createVariable(lon_name, precision, (lon_name,))
4327    outfile.variables[lon_name].point_spacing='uneven'
4328    outfile.variables[lon_name].units='degrees_east'
4329    outfile.variables[lon_name].assignValue(lon)
4330
4331
4332    # Latitude
4333    outfile.createDimension(lat_name, len(lat))
4334    outfile.createVariable(lat_name, precision, (lat_name,))
4335    outfile.variables[lat_name].point_spacing='uneven'
4336    outfile.variables[lat_name].units='degrees_north'
4337    outfile.variables[lat_name].assignValue(lat)
4338
4339
4340   
4341def lon_lat2grid(long_lat_dep):
4342    """
4343    given a list of points that are assumed to be on a grid,
4344    return the long's and lat's of the grid.
4345    long_lat_dep is an array where each row is a position.
4346    The first column is longitudes.
4347    The second column is latitudes.
[3820]4348
4349    The latitude is the fastest varying dimension - in mux files
[3720]4350    """
4351    LONG = 0
4352    LAT = 1
[3820]4353    QUANTITY = 2
[3830]4354
4355    long_lat_dep = ensure_numeric(long_lat_dep, Float)
[3820]4356   
[3826]4357    num_points = long_lat_dep.shape[0]
4358    this_rows_long = long_lat_dep[0,LONG]
[3964]4359
[3826]4360    # Count the length of unique latitudes
4361    i = 0
[3826]4362    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
4363        i += 1
[3964]4364    # determine the lats and longs from the grid
[3826]4365    lat = long_lat_dep[:i, LAT]       
[3964]4366    long = long_lat_dep[::i, LONG]
4367   
[3826]4368    lenlong = len(long)
4369    lenlat = len(lat)
[3964]4370    #print 'len lat', lat, len(lat)
4371    #print 'len long', long, len(long)
[3826]4372         
4373    msg = 'Input data is not gridded'     
4374    assert num_points % lenlat == 0, msg
4375    assert num_points % lenlong == 0, msg
4376         
4377    # Test that data is gridded       
4378    for i in range(lenlong):
[3720]4379        msg = 'Data is not gridded.  It must be for this operation'
[3826]4380        first = i*lenlat
4381        last = first + lenlat
4382               
4383        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4384        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
[3720]4385   
[3826]4386   
4387#    print 'range long', min(long), max(long)
4388#    print 'range lat', min(lat), max(lat)
4389#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4390#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4391   
4392   
4393   
4394    msg = 'Out of range latitudes/longitudes'
[3720]4395    for l in lat:assert -90 < l < 90 , msg
4396    for l in long:assert -180 < l < 180 , msg
4397
[3826]4398    # Changing quantity from lat being the fastest varying dimension to
[3820]4399    # long being the fastest varying dimension
4400    # FIXME - make this faster/do this a better way
4401    # use numeric transpose, after reshaping the quantity vector
[3826]4402#    quantity = zeros(len(long_lat_dep), Float)
4403    quantity = zeros(num_points, Float)
4404   
4405#    print 'num',num_points
[3820]4406    for lat_i, _ in enumerate(lat):
4407        for long_i, _ in enumerate(long):
4408            q_index = lat_i*lenlong+long_i
4409            lld_index = long_i*lenlat+lat_i
[3826]4410#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4411            temp = long_lat_dep[lld_index, QUANTITY]
4412            quantity[q_index] = temp
[3820]4413           
4414    return long, lat, quantity
4415
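# A worked example of lon_lat2grid with a made-up 2 x 2 grid (latitude is the
# fastest varying dimension, as in mux files):
#
#     from Numeric import array
#     lld = array([[150.0, -34.1, 5.0],
#                  [150.0, -34.0, 6.0],
#                  [150.1, -34.1, 7.0],
#                  [150.1, -34.0, 8.0]])
#     long, lat, q = lon_lat2grid(lld)
#     # long -> [150.0, 150.1], lat -> [-34.1, -34.0]
#     # q is reordered with longitude varying fastest: [5., 7., 6., 8.]
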
[5250]4416####  END URS 2 SWW  ###
[4223]4417
[5250]4418#### URS UNGRIDDED 2 SWW ###
[4223]4419
[5250]4420### PRODUCING THE POINTS NEEDED FILE ###
4421
[5248]4422# Ones used for FESA 2007 results
4423#LL_LAT = -50.0
4424#LL_LONG = 80.0
4425#GRID_SPACING = 1.0/60.0
4426#LAT_AMOUNT = 4800
4427#LONG_AMOUNT = 3600
4428
[4318]4429def URS_points_needed_to_file(file_name, boundary_polygon, zone,
[5249]4430                              ll_lat, ll_long,
4431                              grid_spacing, 
4432                              lat_amount, long_amount,
[5253]4433                              isSouthernHemisphere=True,
[4318]4434                              export_csv=False, use_cache=False,
[4284]4435                              verbose=False):
[4238]4436    """
[5253]4437    Given the info to replicate the URS grid and a polygon, output
4438    a file that specifies the cloud of boundary points for URS.
[5369]4439
4440    This creates a .urs file.  This is in the format used by URS;
4441    1st line is the number of points,
4442    each line after represents a point, in lats and longs.
[5253]4443   
4444    Note: The polygon cannot cross zones or hemispheres.
[5369]4445
4446    A work-around for different zones or hemispheres is to run this twice,
4447    once for each zone, and then combine the output.
[5253]4448   
[4238]4449    file_name - name of the urs file produced for David.
4450    boundary_polygon - a list of points that describes a polygon.
4451                      The last point is assumed to join the first point.
4452                      This is in UTM (lat long would be better though)
4453
[5248]4454     This is info about the URS model that needs to be supplied.
[5253]4455
[4238]4456    ll_lat - lower left latitude, in decimal degrees
4457    ll_long - lower left longitude, in decimal degrees
4458    grid_spacing - in decimal degrees
[5254]4459    lat_amount - number of latitudes
4460    long_amount - number of longitudes
[4238]4461
4462
4463    Don't add the file extension.  It will be added.
4464    """
[4318]4465    geo = URS_points_needed(boundary_polygon, zone, ll_lat, ll_long,
4466                            grid_spacing, 
[5253]4467                            lat_amount, long_amount, isSouthernHemisphere,
4468                            use_cache, verbose)
[4254]4469    if not file_name[-4:] == ".urs":
4470        file_name += ".urs"
[5253]4471    geo.export_points_file(file_name, isSouthHemisphere=isSouthernHemisphere)
[4284]4472    if export_csv:
4473        if file_name[-4:] == ".urs":
4474            file_name = file_name[:-4] + ".csv"
4475        geo.export_points_file(file_name)
[5254]4476    return geo
[4284]4477
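# Usage sketch for URS_points_needed_to_file (the polygon and URS grid
# parameters below are illustrative assumptions only):
#
#     polygon = [[301000., 6181000.], [302000., 6181000.],
#                [302000., 6182000.], [301000., 6182000.]]
#     URS_points_needed_to_file('boundary_points', polygon, zone=56,
#                               ll_lat=-40.0, ll_long=148.0,
#                               grid_spacing=1.0/60.0,
#                               lat_amount=600, long_amount=360,
#                               export_csv=True, verbose=True)
#
# This writes 'boundary_points.urs' (and 'boundary_points.csv') listing the
# lat/long grid points needed to interpolate along the polygon's edges.
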
[5250]4478def URS_points_needed(boundary_polygon, zone, ll_lat,
4479                      ll_long, grid_spacing, 
[5253]4480                      lat_amount, long_amount, isSouthHemisphere=True,
[4318]4481                      use_cache=False, verbose=False):
4482    args = (boundary_polygon,
[5253]4483            zone, ll_lat,
4484            ll_long, grid_spacing, 
4485            lat_amount, long_amount, isSouthHemisphere)
4486    kwargs = {} 
[4284]4487    if use_cache is True:
4488        try:
4489            from anuga.caching import cache
4490        except:
4491            msg = 'Caching was requested, but caching module'+\
4492                  'could not be imported'
4493            raise msg
4494
4495
4496        geo = cache(_URS_points_needed,
4497                  args, kwargs,
4498                  verbose=verbose,
4499                  compression=False)
4500    else:
[5253]4501        geo = apply(_URS_points_needed, args, kwargs)
[5250]4502
4503    return geo
4504
4505def _URS_points_needed(boundary_polygon,
[4318]4506                      zone, ll_lat,
[4284]4507                      ll_long, grid_spacing, 
[5253]4508                      lat_amount, long_amount,
4509                       isSouthHemisphere):
[4223]4510    """
4511    boundary_polygon - a list of points that describes a polygon.
4512                      The last point is assumed to join the first point.
[5250]4513                      This is in UTM (lat long would be better though)
[4223]4514
4515    ll_lat - lower left latitude, in decimal degrees
4516    ll_long - lower left longitude, in decimal degrees
4517    grid_spacing - in decimal degrees
4518
4519    """
[4254]4520   
[4223]4521    from sets import ImmutableSet
4522   
4523    msg = "grid_spacing can not be zero"
[5250]4524    assert not grid_spacing == 0, msg
[4223]4525    a = boundary_polygon
4526    # List of segments.  Each segment is two points.
4527    segs = [i and [a[i-1], a[i]] or [a[len(a)-1], a[0]] for i in range(len(a))]
4528    # convert the segs to Lat's and longs.
[4318]4529   
4530    # Don't assume the zone of the segments is the same as the lower left
4531    # corner of the lat long data!!  They can easily be in different zones
4532   
[4223]4533    lat_long_set = ImmutableSet()
4534    for seg in segs:
4535        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing, 
[5253]4536                      lat_amount, long_amount, zone, isSouthHemisphere)
[4223]4537        lat_long_set |= ImmutableSet(points_lat_long)
[5253]4538    if lat_long_set == ImmutableSet([]):
4539        msg = """URS region specified and polygon does not overlap."""
4540        raise ValueError, msg
4541
4542    # Warning there is no info in geospatial saying the hemisphere of
4543    # these points.  There should be.
[4223]4544    geo = Geospatial_data(data_points=list(lat_long_set),
4545                              points_are_lats_longs=True)
4546    return geo
4547   
4548def points_needed(seg, ll_lat, ll_long, grid_spacing, 
[5253]4549                  lat_amount, long_amount, zone,
4550                  isSouthHemisphere):
[4223]4551    """
[5254]4552    seg is two points, in UTM
[4223]4553    return a list of the points, in lats and longs that are needed to
4554    interpolate any point on the segment.
4555    """
[4280]4556    from math import sqrt
[4223]4557    #print "zone",zone
4558    geo_reference = Geo_reference(zone=zone)
[5254]4559    #print "seg", seg
[4223]4560    geo = Geospatial_data(seg,geo_reference=geo_reference)
[5253]4561    seg_lat_long = geo.get_data_points(as_lat_long=True,
4562                                       isSouthHemisphere=isSouthHemisphere)
[5254]4563    #print "seg_lat_long", seg_lat_long
[4223]4564    # 1.415 = 2^0.5, rounded up....
[4280]4565    sqrt_2_rounded_up = 1.415
4566    buffer = sqrt_2_rounded_up * grid_spacing
[4223]4567   
4568    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
4569    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
[4238]4570    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
4571    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer
[4223]4572
[5254]4573    #print "min_long", min_long
4574    #print "ll_long", ll_long
4575    #print "grid_spacing", grid_spacing
[4223]4576    first_row = (min_long - ll_long)/grid_spacing
4577    # To round up
4578    first_row_long = int(round(first_row + 0.5))
4579    #print "first_row", first_row_long
4580
4581    last_row = (max_long - ll_long)/grid_spacing # round down
4582    last_row_long = int(round(last_row))
4583    #print "last_row",last_row _long
4584   
4585    first_row = (min_lat - ll_lat)/grid_spacing
4586    # To round up
4587    first_row_lat = int(round(first_row + 0.5))
4588    #print "first_row", first_row_lat
4589
4590    last_row = (max_lat - ll_lat)/grid_spacing # round down
4591    last_row_lat = int(round(last_row))
4592    #print "last_row",last_row_lat
4593
[4283]4594    # to work out the max distance -
4595    # 111120 m is the distance spanned by 1 degree of latitude.
4596    #max_distance = sqrt_2_rounded_up * 111120 * grid_spacing
4597    max_distance = 157147.4112 * grid_spacing # sqrt(2) * 111120 * grid_spacing
4598    #print "max_distance", max_distance #2619.12 m for 1 minute
[4223]4599    points_lat_long = []
4600    # Create a list of the lat long points to include.
4601    for index_lat in range(first_row_lat, last_row_lat + 1):
4602        for index_long in range(first_row_long, last_row_long + 1):
[5254]4603            #print "index_lat", index_lat
4604            #print "index_long", index_long
[4223]4605            lat = ll_lat + index_lat*grid_spacing
4606            long = ll_long + index_long*grid_spacing
[4280]4607
4608            #filter here to keep good points
4609            if keep_point(lat, long, seg, max_distance):
4610                points_lat_long.append((lat, long)) #must be hashable
[4223]4611    #print "points_lat_long", points_lat_long
[4268]4612
[4280]4613    # Now that we have these points, lets throw ones out that are too far away
4614    return points_lat_long
4615
4616def keep_point(lat, long, seg, max_distance):
4617    """
4618    seg is two points, in UTM. Return True if (lat, long) is within max_distance of the line through seg.
4619    """
4620    from math import sqrt
4621    _ , x0, y0 = redfearn(lat, long)
4622    x1 = seg[0][0]
4623    y1 = seg[0][1]
4624    x2 = seg[1][0]
4625    y2 = seg[1][1]
4626    x2_1 = x2-x1
4627    y2_1 = y2-y1
[5254]4628    try:
4629        d = abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1))/sqrt( \
4630            (x2_1)*(x2_1)+(y2_1)*(y2_1))
4631    except ZeroDivisionError:
4632        #print "seg", seg
4633        #print "x0", x0
4634        #print "y0", y0
4635        if sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1)) == 0 and \
4636           abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1)) == 0:
4637            return True
4638        else:
4639            return False
4640   
[4280]4641    if d <= max_distance:
4642        return True
4643    else:
4644        return False
4645   
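# A minimal worked sketch of what keep_point computes (numbers are
# hypothetical): d is the perpendicular distance from the converted point
# (x0, y0) to the infinite line through the two segment endpoints.
#
#     seg = [[0.0, 0.0], [10.0, 0.0]]          # hypothetical UTM segment
#     # Suppose redfearn(lat, long) returns (zone, 5.0, 3.0); then
#     # d = abs((10-0)*(0-3) - (0-5)*(0-0)) / sqrt(10**2 + 0**2) = 3.0,
#     # so the point is kept only if max_distance >= 3.0.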
[4268]4646    #### CONVERTING UNGRIDDED URS DATA TO AN SWW FILE ####
[4378]4647   
[4400]4648WAVEHEIGHT_MUX_LABEL = '_waveheight-z-mux'
[4378]4649EAST_VELOCITY_LABEL =  '_velocity-e-mux'
[4400]4650NORTH_VELOCITY_LABEL =  '_velocity-n-mux' 
[4268]4651def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
[4382]4652                      mint=None, maxt=None,
4653                      mean_stage=0,
4654                      origin=None,
4655                      hole_points_UTM=None,
4656                      zscale=1):
[4298]4657    """   
4658    Convert URS C binary format for wave propagation to
4659    sww format native to abstract_2d_finite_volumes.
4660
[4378]4661
[4298]4662    Specify only basename_in and read files of the form
[4378]4663    basefilename_waveheight-z-mux, basefilename_velocity-e-mux and
4664    basefilename_velocity-n-mux containing relative height,
4665    x-velocity and y-velocity, respectively.
[4298]4666
4667    Also convert latitude and longitude to UTM. All coordinates are
4668    assumed to be given in the GDA94 datum. The latitude and longitude
4669    information is assumed to be ungridded.
4670
4671    min's and max's: If omitted the full extent is used.
4672    To include a value, min and max may equal it.
4673    Lat and lon are assumed to be in decimal degrees.
4674   
4675    origin is a 3-tuple with geo referenced
4676    UTM coordinates (zone, easting, northing)
4677    It will be the origin of the sww file. This shouldn't be used,
4678    since all of anuga should be able to handle an arbitrary origin.
[4455]4679    The mux point info is NOT relative to this origin.
[4298]4680
4681
4682    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
4683    which means that latitude is the fastest
4684    varying dimension (row major order, so to speak)
4685
4686    In URS C binary the latitudes and longitudes are in ascending order.
[4363]4687
4688    Note, interpolations of the resulting sww file will be different
4689    from results of urs2sww.  This is due to the interpolation
4690    function used, and the different grid structure between urs2sww
4691    and this function.
4692   
4693    Interpolating data that has an underlying gridded source can
4694    easily end up with different values, depending on the underlying
4695    mesh.
4696
4697    consider these 4 points
4698    50  -50
4699
4700    0     0
4701
4702    The grid can be
4703     -
4704    |\|    A
4705     -
4706     or;
4707      -
4708     |/|   B
4709      -
4710      If a point is just below the centre of the square, it will have a
4711      +ve value in grid A and a -ve value in grid B.
[4292]4712    """ 
[4899]4713    from anuga.mesh_engine.mesh_engine import NoTrianglesError
4714    from anuga.pmesh.mesh import Mesh
[4280]4715
[4378]4716    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4717                basename_in + EAST_VELOCITY_LABEL,
4718                basename_in + NORTH_VELOCITY_LABEL]
[4271]4719    quantities = ['HA','UA','VA']
[4268]4720
[4271]4721    # Instantiate Urs_points for each of the three mux files.
4722    mux = {}
4723    for quantity, file in map(None, quantities, files_in):
4724        mux[quantity] = Urs_points(file)
4725       
[4298]4726    # Could check that the depth is the same. (hashing)
[4268]4727
[4271]4728    # handle to a mux file to do depth stuff
4729    a_mux = mux[quantities[0]]
4730   
4731    # Convert to utm
4732    lat = a_mux.lonlatdep[:,1]
4733    long = a_mux.lonlatdep[:,0]
4734    points_utm, zone = convert_from_latlon_to_utm( \
4735        latitudes=lat, longitudes=long)
4736    #print "points_utm", points_utm
4737    #print "zone", zone
4738
[4280]4739    elevation = a_mux.lonlatdep[:,2] * -1 #
[4271]4740   
4741    # grid ( create a mesh from the selected points)
[4280]4742    # This mesh has a problem.  Triangles are stretched over ungridded areas.
4743    #  If these areas could be described as holes in pmesh, that would be great
[4381]4744
4745    # I can't just get the user to select a point in the middle.
4746    # A boundary is needed around these points.
4747    # But if the zone of points is obvious enough auto-segment should do
4748    # a good boundary.
[4271]4749    mesh = Mesh()
4750    mesh.add_vertices(points_utm)
[4455]4751    mesh.auto_segment(smooth_indents=True, expand_pinch=True)
4752    # To try and avoid alpha shape 'hugging' too much
4753    mesh.auto_segment( mesh.shape.get_alpha()*1.1 )
[4382]4754    if hole_points_UTM is not None:
4755        point = ensure_absolute(hole_points_UTM)
4756        mesh.add_hole(point[0], point[1])
4757    try:
4758        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4759    except NoTrianglesError:
4760        # This is a bit of a hack, going in and changing the
4761        # data structure.
4762        mesh.holes = []
4763        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
[4271]4764    mesh_dic = mesh.Mesh2MeshList()
4765
[4522]4766    #mesh.export_mesh_file(basename_in + '_168.tsh')
4767    #import sys; sys.exit()
[4295]4768    # These are the times of the mux file
4769    mux_times = []
[4271]4770    for i in range(a_mux.time_step_count):
[4295]4771        mux_times.append(a_mux.time_step * i) 
4772    mux_times_start_i, mux_times_fin_i = mux2sww_time(mux_times, mint, maxt)
4773    times = mux_times[mux_times_start_i:mux_times_fin_i]
4774   
4775    if mux_times_start_i == mux_times_fin_i:
4776        # Close the mux files
4777        for quantity, file in map(None, quantities, files_in):
4778            mux[quantity].close()
4779        msg="Due to mint and maxt there's no time info in the boundary SWW."
[4608]4780        raise Exception, msg
[4271]4781       
[4295]4782    # If this raise is removed there are currently no downstream errors
4783           
[4271]4784    points_utm=ensure_numeric(points_utm)
4785    assert ensure_numeric(mesh_dic['generatedpointlist']) == \
4786           ensure_numeric(points_utm)
4787   
4788    volumes = mesh_dic['generatedtrianglelist']
4789   
4790    # write sww intro and grid stuff.   
[4268]4791    if basename_out is None:
4792        swwname = basename_in + '.sww'
4793    else:
4794        swwname = basename_out + '.sww'
4795
[4348]4796    if verbose: print 'Output to ', swwname
[4268]4797    outfile = NetCDFFile(swwname, 'w')
[4295]4798    # For a different way of doing this, check out tsh2sww
4799    # work out sww_times and the index range this covers
[4455]4800    sww = Write_sww()
[4704]4801    sww.store_header(outfile, times, len(volumes), len(points_utm),
[4862]4802                     verbose=verbose,sww_precision=Float)
[4295]4803    outfile.mean_stage = mean_stage
4804    outfile.zscale = zscale
[4455]4805
[4704]4806    sww.store_triangulation(outfile, points_utm, volumes,
[4455]4807                            elevation, zone,  new_origin=origin,
4808                            verbose=verbose)
[4292]4809   
4810    if verbose: print 'Converting quantities'
[4280]4811    j = 0
4812    # Read in a time slice from each mux file and write it to the sww file
[4455]4813    for ha, ua, va in map(None, mux['HA'], mux['UA'], mux['VA']):
[4295]4814        if j >= mux_times_start_i and j < mux_times_fin_i:
[4455]4815            stage = zscale*ha + mean_stage
4816            h = stage - elevation
4817            xmomentum = ua*h
4818            ymomentum = -1*va*h # -1 since in mux files south is positive.
[4704]4819            sww.store_quantities(outfile, 
4820                                 slice_index=j - mux_times_start_i,
4821                                 verbose=verbose,
4822                                 stage=stage,
4823                                 xmomentum=xmomentum,
[4862]4824                                 ymomentum=ymomentum,
4825                                 sww_precision=Float)
[4280]4826        j += 1
[4455]4827    if verbose: sww.verbose_quantities(outfile)
[4280]4828    outfile.close()
[4271]4829    #
4830    # Do some conversions while writing the sww file
[4455]4831
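# A hedged usage sketch for urs_ungridded2sww (the basename is hypothetical;
# matching o_waveheight-z-mux, o_velocity-e-mux and o_velocity-n-mux files
# must exist on disk):
#
#     urs_ungridded2sww(basename_in='o', basename_out='boundary',
#                       mint=0.0, maxt=3600.0,
#                       mean_stage=0.0, zscale=1, verbose=True)
#
# This would write 'boundary.sww' using a triangulation auto-generated from
# the ungridded mux points.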
[5347]4832    ##################################
4833    # READ MUX2 FILES line of points #
4834    ##################################
4835
4836WAVEHEIGHT_MUX2_LABEL = '_waveheight-z-mux2'
4837EAST_VELOCITY_MUX2_LABEL =  '_velocity-e-mux2'
4838NORTH_VELOCITY_MUX2_LABEL =  '_velocity-n-mux2'   
4839
4840def read_mux2_py(filenames,weights=None):
4841
4842    from Numeric import ones,Float,compress,zeros,arange
4843    from urs_ext import read_mux2
4844
4845    if weights is None:
4846        weights=ones(len(filenames),Float)/len(filenames) #default 1/numSrc
4847
4848    file_params=-1*ones(3,Float)#[nsta,dt,nt]
[5358]4849    write=0 #if true write txt files to current directory as well
[5347]4850    data=read_mux2(1,filenames,weights,file_params,write)
4851
4852    msg='File parameter values were not read in correctly from c file'
4853    assert len(compress(file_params>0,file_params))!=0,msg
4854    msg='The number of stations specified in the c array and in the file are inconsistent'
4855    assert file_params[0]==data.shape[0],msg
[4455]4856   
[5347]4857    nsta=int(file_params[0])
4858    msg='Must have at least one station'
4859    assert nsta>0,msg
4860    dt=file_params[1]
4861    msg='Must have a positive timestep'
4862    assert dt>0,msg
4863    nt=int(file_params[2])
4864    msg='Must have at least one gauge value'
4865    assert nt>0,msg
4866   
4867    OFFSET=5 #number of site parameters p passed back with data
4868    #p=[geolat,geolon,depth,start_tstep,finish_tstep]
4869
4870    times=dt*arange(1,(data.shape[1]-OFFSET)+1)
4871    latitudes=zeros(data.shape[0],Float)
4872    longitudes=zeros(data.shape[0],Float)
4873    elevation=zeros(data.shape[0],Float)
[5358]4874    quantity=zeros((data.shape[0],data.shape[1]-OFFSET),Float)
[5347]4875    for i in range(0,data.shape[0]):
4876        latitudes[i]=data[i][data.shape[1]-OFFSET]
4877        longitudes[i]=data[i][data.shape[1]-OFFSET+1]
4878        elevation[i]=-data[i][data.shape[1]-OFFSET+2]
[5358]4879        quantity[i]=data[i][:-OFFSET]
[5347]4880
[5358]4881    return times, latitudes, longitudes, elevation, quantity
[5347]4882
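# A minimal sketch of calling read_mux2_py (the file name is hypothetical and
# must be an existing mux2 file):
#
#     times, lats, longs, elev, quantity = \
#         read_mux2_py(['o_waveheight-z-mux2'])
#     # times has shape (nt,); lats, longs and elev have shape (nsta,);
#     # quantity has shape (nsta, nt) - one row of gauge values per station.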
[4295]4883def mux2sww_time(mux_times, mint, maxt):
4884    """
4885    """
[4271]4886
[4295]4887    if mint == None:
4888        mux_times_start_i = 0
4889    else:
4890        mux_times_start_i = searchsorted(mux_times, mint)
4891       
4892    if maxt == None:
4893        mux_times_fin_i = len(mux_times)
4894    else:
4895        maxt += 0.5 # so if you specify a time where there is
4896                    # data that time will be included
4897        mux_times_fin_i = searchsorted(mux_times, maxt)
4898
4899    return mux_times_start_i, mux_times_fin_i
4900
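# A worked sketch of mux2sww_time (values are hypothetical):
#
#     mux_times = [0.0, 0.5, 1.0, 1.5]          # seconds
#     start_i, fin_i = mux2sww_time(mux_times, mint=0.5, maxt=1.0)
#     # start_i == 1 and fin_i == 3, so mux_times[start_i:fin_i] == [0.5, 1.0];
#     # the 0.5 added to maxt ensures a time exactly equal to maxt is included.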
[4455]4901
[5347]4902def urs2sts(basename_in, basename_out = None, verbose = False, origin = None,
4903            mean_stage=0.0,zscale=1.0,
4904            minlat = None, maxlat = None,
4905            minlon = None, maxlon = None):
4906    """Convert URS mux2 format for wave propagation to sts format
4907
4908    Specify only basename_in and read files of the form
4909    out_waveheight-z-mux2
4910
4911    Also convert latitude and longitude to UTM. All coordinates are
4912    assumed to be given in the GDA94 datum
4913
4914    origin is a 3-tuple with geo referenced
4915    UTM coordinates (zone, easting, northing)
4916    """
4917    import os
4918    from Scientific.IO.NetCDF import NetCDFFile
4919    from Numeric import Float, Int, Int32, searchsorted, zeros, array
4920    from Numeric import allclose, around
4921
4922    precision = Float
4923
4924    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
4925
4926    if minlat != None:
4927        assert -90 < minlat < 90 , msg
4928    if maxlat != None:
4929        assert -90 < maxlat < 90 , msg
4930        if minlat != None:
4931            assert maxlat > minlat
4932    if minlon != None:
4933        assert -180 < minlon < 180 , msg
4934    if maxlon != None:
4935        assert -180 < maxlon < 180 , msg
4936        if minlon != None:
4937            assert maxlon > minlon
4938
4939    if basename_out is None:
4940        stsname = basename_in + '.sts'
4941    else:
4942        stsname = basename_out + '.sts'
4943
4944    files_in = [basename_in + WAVEHEIGHT_MUX2_LABEL,
4945                basename_in + EAST_VELOCITY_MUX2_LABEL,
4946                basename_in + NORTH_VELOCITY_MUX2_LABEL]
4947    quantities = ['HA','UA','VA']
4948
[5358]4949    for file_in in files_in:
4950        if (os.access(file_in, os.F_OK) == 0):
4951            msg = 'File %s does not exist or is not accessible' %file_in
4952            raise IOError, msg
4953
[5347]4954    #need to do this for velocity-e-mux2 and velocity-n-mux2 files as well
4955    #for now set x_momentum and y_momentum quantities to zero
4956    if (verbose): print 'reading mux2 file'
4957    mux={}
4958    for quantity, file in map(None, quantities, files_in):
4959        times_urs, latitudes_urs, longitudes_urs, elevation, mux[quantity] = read_mux2_py([file])
4960        if quantity!=quantities[0]:
4961            msg='%s, %s and %s have inconsistent gauge data'%(files_in[0],files_in[1],files_in[2])
4962            assert allclose(times_urs,times_urs_old),msg
4963            assert allclose(latitudes_urs,latitudes_urs_old),msg
4964            assert allclose(longitudes_urs,longitudes_urs_old),msg
4965            assert allclose(elevation,elevation_old),msg
4966        times_urs_old=times_urs
4967        latitudes_urs_old=latitudes_urs
4968        longitudes_urs_old=longitudes_urs
4969        elevation_old=elevation
4970       
4971    if (minlat is not None) and (minlon is not None) and (maxlat is not None) and (maxlon is not None):
[5358]4972        if verbose: print 'Clipping URS data'
[5347]4973        latitudes = compress((latitudes_urs>=minlat)&(latitudes_urs<=maxlat)&(longitudes_urs>=minlon)&(longitudes_urs<=maxlon),latitudes_urs)
4974        longitudes = compress((latitudes_urs>=minlat)&(latitudes_urs<=maxlat)&(longitudes_urs>=minlon)&(longitudes_urs<=maxlon),longitudes_urs)
4975        times = compress((latitudes_urs>=minlat)&(latitudes_urs<=maxlat)&(longitudes_urs>=minlon)&(longitudes_urs<=maxlon),times_urs)
4976    else:
4977        latitudes=latitudes_urs
4978        longitudes=longitudes_urs
4979        times=times_urs
4980
[5358]4981    msg='File is empty and/or clipped region not in file region'
4982    assert len(latitudes) > 0, msg
4983
[5347]4984    number_of_points = latitudes.shape[0]
4985    number_of_times = times.shape[0]
4986    number_of_latitudes = latitudes.shape[0]
4987    number_of_longitudes = longitudes.shape[0]
4988
4989    # NetCDF file definition
4990    outfile = NetCDFFile(stsname, 'w')
4991
4992    description = 'Converted from URS mux2 files: %s'\
4993                  %(basename_in)
4994   
4995    # Create new file
4996    starttime = times[0]
4997    sts = Write_sts()
4998    sts.store_header(outfile, times,number_of_points, description=description,
4999                     verbose=verbose,sts_precision=Float)
5000   
5001    # Store
5002    from anuga.coordinate_transforms.redfearn import redfearn
5003    x = zeros(number_of_points, Float)  #Easting
5004    y = zeros(number_of_points, Float)  #Northing
5005
5006    # Check zone boundaries
5007    refzone, _, _ = redfearn(latitudes[0],longitudes[0]) 
5008
5009    for i in range(number_of_points):
5010        zone, easting, northing = redfearn(latitudes[i],longitudes[i])
5011        x[i] = easting
5012        y[i] = northing
[5358]5013        msg='all sts gauges need to be in the same zone'
5014        assert zone==refzone,msg
[5347]5015
5016    if origin is None:
5017        origin = Geo_reference(refzone,min(x),min(y))
5018    geo_ref = write_NetCDF_georeference(origin, outfile)
5019
5020    z = elevation
5021   
5022    #print geo_ref.get_xllcorner()
5023    #print geo_ref.get_yllcorner()
5024
5025    z = resize(z,outfile.variables['z'][:].shape)
5026    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
5027    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
5028    outfile.variables['z'][:] = z             #FIXME HACK for bacwards compat.
5029    outfile.variables['elevation'][:] = z
5030
5031    stage = outfile.variables['stage']
5032    xmomentum = outfile.variables['xmomentum']
5033    ymomentum =outfile.variables['ymomentum']
5034
5035    if verbose: print 'Converting quantities'
5036    for j in range(len(times)):
5037        for i in range(number_of_points):
5038            w = zscale*mux['HA'][i,j] + mean_stage
5039            h=w-elevation[i]
5040            stage[j,i] = w
5041            #delete following two lines once velocity files are read in.
5042            xmomentum[j,i] = mux['UA'][i,j]*h
5043            ymomentum[j,i] = mux['VA'][i,j]*h
5044
5045    outfile.close()
5046
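# A hedged usage sketch for urs2sts (the basename is hypothetical; matching
# out_waveheight-z-mux2, out_velocity-e-mux2 and out_velocity-n-mux2 files
# must exist on disk):
#
#     urs2sts('out', basename_out='boundary_gauges',
#             mean_stage=0.0, zscale=1.0,
#             minlat=-21.0, maxlat=-20.0, minlon=114.0, maxlon=115.0,
#             verbose=True)
#
# This would write 'boundary_gauges.sts' holding the (clipped) gauge time series.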
[4455]5047class Write_sww:
[4704]5048    from anuga.shallow_water.shallow_water_domain import Domain
5049
5050    # FIXME (Ole): Hardwiring the conserved quantities like
5051    # this could be a problem. I would prefer taking them from
5052    # the instantiation of Domain.
[4780]5053    #
5054    # (DSG) There is not always a Domain instance when Write_sww is used.
5055    # Check to see if this is the same level of hardwiring as is in
5056    # shallow water domain.
5057   
[4455]5058    sww_quantities = Domain.conserved_quantities
[4704]5059
5060
[4455]5061    RANGE = '_range'
[4704]5062    EXTREMA = ':extrema'
[4699]5063
[4455]5064    def __init__(self):
5065        pass
[4387]5066   
[4704]5067    def store_header(self,
5068                     outfile,
5069                     times,
5070                     number_of_volumes,
5071                     number_of_points,
5072                     description='Converted from XXX',
5073                     smoothing=True,
[4862]5074                     order=1,
5075                     sww_precision=Float32,
5076                     verbose=False):
[4455]5077        """
5078        outfile - the name of the file that will be written
[4558]5079        times - A list of the time slice times OR a start time
5080        Note, if a list is given the info will be made relative.
[4455]5081        number_of_volumes - the number of triangles
5082        """
[4268]5083   
[4455]5084        outfile.institution = 'Geoscience Australia'
5085        outfile.description = description
[4268]5086
[4699]5087        # For sww compatibility
[4455]5088        if smoothing is True:
5089            # Smoothing to be deprecated
5090            outfile.smoothing = 'Yes'
5091            outfile.vertices_are_stored_uniquely = 'False'
5092        else:
5093            # Smoothing to be deprecated
5094            outfile.smoothing = 'No'
5095            outfile.vertices_are_stored_uniquely = 'True'
5096        outfile.order = order
[4268]5097
[4455]5098        try:
5099            revision_number = get_revision_number()
5100        except:
5101            revision_number = None
[4699]5102        # Allow None to be stored as a string               
5103        outfile.revision_number = str(revision_number) 
[4268]5104
[4699]5105
5106       
5107        # times - A list or array of the time slice times OR a start time
5108        # times = ensure_numeric(times)
5109        # Start time in seconds since the epoch (midnight 1/1/1970)
5110
5111        # This is being used to separate one number from a list.
[4455]5112        # What it actually does is distinguish lists/arrays of times from a single start time.
5113        if type(times) is list or type(times) is ArrayType: 
5114            number_of_times = len(times)
[4558]5115            times = ensure_numeric(times) 
5116            if number_of_times == 0:
5117                starttime = 0
5118            else:
5119                starttime = times[0]
5120                times = times - starttime  #Store relative times
[4455]5121        else:
5122            number_of_times = 0
[4558]5123            starttime = times
5124            #times = ensure_numeric([])
5125        outfile.starttime = starttime
[4455]5126        # dimension definitions
5127        outfile.createDimension('number_of_volumes', number_of_volumes)
5128        outfile.createDimension('number_of_vertices', 3)
5129        outfile.createDimension('numbers_in_range', 2)
5130   
5131        if smoothing is True:
5132            outfile.createDimension('number_of_points', number_of_points)
5133       
5134            # FIXME(Ole): This will cause sww files for parallel domains to
5135            # have ghost nodes stored (but not used by triangles).
5136            # To clean this up, we have to change get_vertex_values and
5137            # friends in quantity.py (but I can't be bothered right now)
5138        else:
5139            outfile.createDimension('number_of_points', 3*number_of_volumes)
5140        outfile.createDimension('number_of_timesteps', number_of_times)
[4268]5141
[4455]5142        # variable definitions
[4862]5143        outfile.createVariable('x', sww_precision, ('number_of_points',))
5144        outfile.createVariable('y', sww_precision, ('number_of_points',))
5145        outfile.createVariable('elevation', sww_precision, ('number_of_points',))
[4455]5146        q = 'elevation'
[4862]5147        outfile.createVariable(q+Write_sww.RANGE, sww_precision,
[4455]5148                               ('numbers_in_range',))
[4268]5149
[4704]5150
[4699]5151        # Initialise ranges with small and large sentinels.
5152        # If this was in pure Python we could have used None sensibly
[4862]5153        outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
[4699]5154        outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
5155
[4704]5156        # FIXME: Backwards compatibility
[4862]5157        outfile.createVariable('z', sww_precision, ('number_of_points',))
[4455]5158        #################################
[4268]5159
[4455]5160        outfile.createVariable('volumes', Int, ('number_of_volumes',
5161                                                'number_of_vertices'))
[4862]5162        # Doing sww_precision instead of Float gives cast errors.
5163        outfile.createVariable('time', Float,
[4455]5164                               ('number_of_timesteps',))
5165       
5166        for q in Write_sww.sww_quantities:
[4862]5167            outfile.createVariable(q, sww_precision,
[4455]5168                                   ('number_of_timesteps',
5169                                    'number_of_points')) 
[4862]5170            outfile.createVariable(q+Write_sww.RANGE, sww_precision,
[4455]5171                                   ('numbers_in_range',))
[4699]5172
5173            # Initialise ranges with small and large sentinels.
5174            # If this was in pure Python we could have used None sensibly
[4862]5175            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
[4699]5176            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
[4455]5177           
[4541]5178        if type(times) is list or type(times) is ArrayType: 
5179            outfile.variables['time'][:] = times    #Store time relative
5180           
[4455]5181        if verbose:
5182            print '------------------------------------------------'
5183            print 'Statistics:'
5184            print '    t in [%f, %f], len(t) == %d'\
5185                  %(min(times.flat), max(times.flat), len(times.flat))
[4268]5186
[4455]5187       
[4704]5188    def store_triangulation(self,
5189                            outfile,
5190                            points_utm,
5191                            volumes,
5192                            elevation, zone=None, new_origin=None, 
5193                            points_georeference=None, verbose=False):
[4455]5194        """
5195       
5196        new_origin - a georeference that the points can be set to. (Maybe
5197        do this before calling this function.)
5198       
5199        points_utm - currently a list or array of the points in UTM.
5200        points_georeference - the georeference of the points_utm
5201       
5202        How about passing new_origin and current_origin.
5203        If you get both, do a conversion from the old to the new.
5204       
[4665]5205        If you only get new_origin, the points are absolute,
5206        convert to relative
[4455]5207       
5208        if you only get the current_origin the points are relative, store
5209        as relative.
5210       
5211        if you get no georefs create a new georef based on the minimums of
5212        points_utm.  (Another option would be to default to absolute)
5213       
5214        Yes, and this is done in another part of the code.
5215        Probably geospatial.
5216       
5217        If you don't supply either geo_refs, then supply a zone. If not
5218        the default zone will be used.
5219       
5220       
5221        precon
5222       
5223        header has been called.
5224        """
5225       
5226        number_of_points = len(points_utm)   
[4665]5227        volumes = array(volumes) 
5228        points_utm = array(points_utm)
[4268]5229
[4455]5230        # given the two geo_refs and the points, do the stuff
5231        # described in the method header
5232        # if this is needed else where, pull out as a function
5233        points_georeference = ensure_geo_reference(points_georeference)
5234        new_origin = ensure_geo_reference(new_origin)
5235        if new_origin is None and points_georeference is not None:
5236            points = points_utm
5237            geo_ref = points_georeference
5238        else:
5239            if new_origin is None:
5240                new_origin = Geo_reference(zone,min(points_utm[:,0]),
5241                                           min(points_utm[:,1]))
5242            points = new_origin.change_points_geo_ref(points_utm,
5243                                                      points_georeference)
5244            geo_ref = new_origin
[4268]5245
[4455]5246        # At this stage I need a georef and points
5247        # the points are relative to the georef
5248        geo_ref.write_NetCDF(outfile)
[4280]5249   
[4455]5250        # This will put the geo ref in the middle
[4665]5251        #geo_ref=Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
5252       
[4455]5253        x =  points[:,0]
5254        y =  points[:,1]
5255        z = outfile.variables['z'][:]
5256   
5257        if verbose:
5258            print '------------------------------------------------'
5259            print 'More Statistics:'
5260            print '  Extent (/lon):'
5261            print '    x in [%f, %f], len(lat) == %d'\
5262                  %(min(x), max(x),
5263                    len(x))
5264            print '    y in [%f, %f], len(lon) == %d'\
5265                  %(min(y), max(y),
5266                    len(y))
5267            print '    z in [%f, %f], len(z) == %d'\
5268                  %(min(elevation), max(elevation),
5269                    len(elevation))
5270            print 'geo_ref: ',geo_ref
5271            print '------------------------------------------------'
5272           
5273        #z = resize(bath_grid,outfile.variables['z'][:].shape)
[4862]5274        #print "points[:,0]", points[:,0]
[4455]5275        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
5276        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
5277        outfile.variables['z'][:] = elevation
5278        outfile.variables['elevation'][:] = elevation  #FIXME HACK
5279        outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
[4268]5280
[4455]5281        q = 'elevation'
5282        # This updates the _range values
5283        outfile.variables[q+Write_sww.RANGE][0] = min(elevation)
5284        outfile.variables[q+Write_sww.RANGE][1] = max(elevation)
[4280]5285
[4704]5286
[4862]5287    def store_quantities(self, outfile, sww_precision=Float32,
[4704]5288                         slice_index=None, time=None,
5289                         verbose=False, **quant):
[4455]5290        """
5291        Write the quantity info.
[4280]5292
[4455]5293        **quant is extra keyword arguments passed in. These must be
5294          the sww quantities, currently; stage, xmomentum, ymomentum.
5295       
5296        if the time array has already been built, use the slice_index
5297        to specify the index.
5298       
5299        Otherwise, use time to increase the time dimension
[4280]5300
[4455]5301        Maybe make this general, but the viewer assumes these quantities,
5302        so maybe we don't want it general - unless the viewer is general
5303       
5304        precon
5305        triangulation and
5306        header have been called.
5307        """
[4280]5308
[4455]5309        if time is not None:
5310            file_time = outfile.variables['time']
5311            slice_index = len(file_time)
5312            file_time[slice_index] = time   
5313
[4938]5314        # Write the conserved quantities from Domain.
[4455]5315        # Typically stage,  xmomentum, ymomentum
5316        # other quantities will be ignored, silently.
[4938]5317        # Also write the ranges: stage_range,
5318        # xmomentum_range and ymomentum_range
[4455]5319        for q in Write_sww.sww_quantities:
5320            if not quant.has_key(q):
5321                msg = 'SWW file can not write quantity %s' %q
5322                raise NewQuantity, msg
5323            else:
5324                q_values = quant[q]
[4862]5325                outfile.variables[q][slice_index] = \
5326                                q_values.astype(sww_precision)
[4455]5327
5328                # This updates the _range values
5329                q_range = outfile.variables[q+Write_sww.RANGE][:]
5330                q_values_min = min(q_values)
5331                if q_values_min < q_range[0]:
5332                    outfile.variables[q+Write_sww.RANGE][0] = q_values_min
5333                q_values_max = max(q_values)
5334                if q_values_max > q_range[1]:
5335                    outfile.variables[q+Write_sww.RANGE][1] = q_values_max
5336
5337    def verbose_quantities(self, outfile):
[4280]5338        print '------------------------------------------------'
5339        print 'More Statistics:'
[4455]5340        for q in Write_sww.sww_quantities:
[4699]5341            print '    %s in [%f, %f]' %(q,
5342                                       outfile.variables[q+Write_sww.RANGE][0],
5343                                       outfile.variables[q+Write_sww.RANGE][1])
[4280]5344        print '------------------------------------------------'
[4699]5345
[4704]5346
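# A minimal sketch of the intended Write_sww workflow (all names and values
# are hypothetical; outfile is an open, writable NetCDFFile and stage,
# xmomentum, ymomentum are Numeric arrays of length number_of_points):
#
#     sww = Write_sww()
#     sww.store_header(outfile, times=[0.0, 30.0],
#                      number_of_volumes=len(volumes),
#                      number_of_points=len(points_utm))
#     sww.store_triangulation(outfile, points_utm, volumes, elevation,
#                             zone=56, new_origin=None)
#     sww.store_quantities(outfile, slice_index=0, stage=stage,
#                          xmomentum=xmomentum, ymomentum=ymomentum)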
[4455]5347       
5348def obsolete_write_sww_time_slices(outfile, has, uas, vas, elevation,
[4280]5349                         mean_stage=0, zscale=1,
5350                         verbose=False):   
5351    #Time stepping
5352    stage = outfile.variables['stage']
5353    xmomentum = outfile.variables['xmomentum']
5354    ymomentum = outfile.variables['ymomentum']
5355
5356    n = len(has)
5357    j=0
5358    for ha, ua, va in map(None, has, uas, vas):
5359        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
5360        w = zscale*ha + mean_stage
5361        stage[j] = w
5362        h = w - elevation
5363        xmomentum[j] = ua*h
[4348]5364        ymomentum[j] = -1*va*h #  -1 since in mux files south is positive.
[4280]5365        j += 1
[4455]5366   
[4303]5367def urs2txt(basename_in, location_index=None):
[4301]5368    """
5369    Not finished or tested
5370    """
[4378]5371   
5372    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
5373                basename_in + EAST_VELOCITY_LABEL,
5374                basename_in + NORTH_VELOCITY_LABEL]
[4301]5375    quantities = ['HA','UA','VA']
5376
[4303]5377    d = ","
5378   
[4301]5379    # instanciate urs_points of the three mux files.
5380    mux = {}
5381    for quantity, file in map(None, quantities, files_in):
5382        mux[quantity] = Urs_points(file)
[4280]5383       
[4301]5384    # Could check that the depth is the same. (hashing)
5385
5386    # handle to a mux file to do depth stuff
5387    a_mux = mux[quantities[0]]
5388   
5389    # Convert to utm
[4303]5390    latitudes = a_mux.lonlatdep[:,1]
5391    longitudes = a_mux.lonlatdep[:,0]
[4301]5392    points_utm, zone = convert_from_latlon_to_utm( \
[4303]5393        latitudes=latitudes, longitudes=longitudes)
[4301]5394    #print "points_utm", points_utm
5395    #print "zone", zone
[4303]5396    depths = a_mux.lonlatdep[:,2]  #
[4301]5397   
5398    fid = open(basename_in + '.txt', 'w')
5399
[4303]5400    fid.write("zone: " + str(zone) + "\n")
[4301]5401
[4303]5402    if location_index is not None:
5403        #Title
5404        li = location_index
5405        fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
5406                  'Northing' + "\n")
5407        fid.write(str(li) +d+ str(latitudes[li])+d+ \
5408              str(longitudes[li]) +d+ str(points_utm[li][0]) +d+ \
5409              str(points_utm[li][1]) + "\n")
5410
5411    # the non-time dependent stuff
5412    #Title
5413    fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
[5403]5414              'Northing' +d+ 'depth m' + "\n")
[4303]5415    i = 0
5416    for depth, point_utm, lat, long in map(None, depths,
5417                                               points_utm, latitudes,
5418                                               longitudes):
5419       
5420        fid.write(str(i) +d+ str(lat)+d+ str(long) +d+ str(point_utm[0]) +d+ \
5421                  str(point_utm[1]) +d+ str(depth) + "\n")
5422        i +=1
5423    #Time dependent
5424    if location_index is not None:
5425        time_step = a_mux.time_step
5426        i = 0
5427        #Title
5428        fid.write('time' +d+ 'HA depth m'+d+ \
[4665]5429                 'UA momentum East x m/sec' +d+ 'VA momentum North y m/sec' \
[4303]5430                      + "\n")
5431        for HA, UA, VA in map(None, mux['HA'], mux['UA'], mux['VA']):
5432            fid.write(str(i*time_step) +d+ str(HA[location_index])+d+ \
5433                      str(UA[location_index]) +d+ str(VA[location_index]) \
5434                      + "\n")
5435           
5436            i +=1
[5347]5437
5438class Write_sts:
5439
5440    sts_quantities = ['stage','xmomentum','ymomentum']
5441
5442
5443    RANGE = '_range'
5444    EXTREMA = ':extrema'
[4301]5445   
[5347]5446    def __init__(self):
5447        pass
5448
5449    def store_header(self,
5450                     outfile,
5451                     times,
5452                     number_of_points,
5453                     description='Converted from URS mux2 format',
5454                     sts_precision=Float32,
5455                     verbose=False):
5456        """
5457        outfile - the name of the file that will be written
5458        times - A list of the time slice times OR a start time
5459        Note, if a list is given the info will be made relative.
5460        number_of_points - the number of urs gauge sites
5461        """
5462
5463        outfile.institution = 'Geoscience Australia'
5464        outfile.description = description
5465       
5466        try:
5467            revision_number = get_revision_number()
5468        except:
5469            revision_number = None
5470        # Allow None to be stored as a string               
5471        outfile.revision_number = str(revision_number) 
5472       
5473        # times - A list or array of the time slice times OR a start time
5474        # Start time in seconds since the epoch (midnight 1/1/1970)
5475
5476        # This is being used to separate one number from a list.
5477        # What it actually does is distinguish lists/arrays of times from a single start time.
5478        if type(times) is list or type(times) is ArrayType: 
5479            number_of_times = len(times)
5480            times = ensure_numeric(times) 
5481            if number_of_times == 0:
5482                starttime = 0
5483            else:
5484                starttime = times[0]
5485                times = times - starttime  #Store relative times
5486        else:
5487            number_of_times = 0
5488            starttime = times
5489
5490        outfile.starttime = starttime
5491
5492        # Dimension definitions
5493        outfile.createDimension('number_of_points', number_of_points)
5494        outfile.createDimension('number_of_timesteps', number_of_times)
5495        outfile.createDimension('numbers_in_range', 2)
5496
5497        # Variable definitions
5498        outfile.createVariable('x', sts_precision, ('number_of_points',))
5499        outfile.createVariable('y', sts_precision, ('number_of_points',))
5500        outfile.createVariable('elevation', sts_precision,('number_of_points',))
5501
5502        q = 'elevation'
5503        outfile.createVariable(q+Write_sts.RANGE, sts_precision,
5504                               ('numbers_in_range',))
5505
5506        # Initialise ranges with small and large sentinels.
5507        # If this was in pure Python we could have used None sensibly
5508        outfile.variables[q+Write_sts.RANGE][0] = max_float  # Min
5509        outfile.variables[q+Write_sts.RANGE][1] = -max_float # Max
5510
5511        outfile.createVariable('z', sts_precision, ('number_of_points',))
5512        # Doing sts_precision instead of Float gives cast errors.
5513        outfile.createVariable('time', Float, ('number_of_timesteps',))
5514
5515        for q in Write_sts.sts_quantities:
5516            outfile.createVariable(q, sts_precision,
5517                                   ('number_of_timesteps',
5518                                    'number_of_points'))
5519            outfile.createVariable(q+Write_sts.RANGE, sts_precision,
5520                                   ('numbers_in_range',))
5521            # Initialise ranges with small and large sentinels.
5522            # If this was in pure Python we could have used None sensibly
5523            outfile.variables[q+Write_sts.RANGE][0] = max_float  # Min
5524            outfile.variables[q+Write_sts.RANGE][1] = -max_float # Max
5525
5526        if type(times) is list or type(times) is ArrayType: 
5527            outfile.variables['time'][:] = times    #Store time relative
5528
5529        if verbose:
5530            print '------------------------------------------------'
5531            print 'Statistics:'
5532            print '    t in [%f, %f], len(t) == %d'\
5533                  %(min(times.flat), max(times.flat), len(times.flat))
5534
5535    def store_points(self,
5536                     outfile,
5537                     points_utm,
5538                     elevation, zone=None, new_origin=None, 
5539                     points_georeference=None, verbose=False):
5540
5541        """
5542        points_utm - currently a list or array of the points in UTM.
5543        points_georeference - the georeference of the points_utm
5544
5545        How about passing new_origin and current_origin.
5546        If you get both, do a conversion from the old to the new.
5547       
5548        If you only get new_origin, the points are absolute,
5549        convert to relative
5550       
5551        if you only get the current_origin the points are relative, store
5552        as relative.
5553       
5554        if you get no georefs create a new georef based on the minimums of
5555        points_utm.  (Another option would be to default to absolute)
5556       
5557        Yes, and this is done in another part of the code.
5558        Probably geospatial.
5559       
5560        If you don't supply either geo_refs, then supply a zone. If not
5561        the default zone will be used.
5562
5563        precondition:
5564             header has been called.
5565        """
5566
5567        number_of_points = len(points_utm)
5568        points_utm = array(points_utm)
5569
5570        # given the two geo_refs and the points, do the stuff
5571        # described in the method header
5572        points_georeference = ensure_geo_reference(points_georeference)
5573        new_origin = ensure_geo_reference(new_origin)
5574       
5575        if new_origin is None and points_georeference is not None:
5576            points = points_utm
5577            geo_ref = points_georeference
5578        else:
5579            if new_origin is None:
5580                new_origin = Geo_reference(zone,min(points_utm[:,0]),
5581                                           min(points_utm[:,1]))
5582            points = new_origin.change_points_geo_ref(points_utm,
5583                                                      points_georeference)
5584            geo_ref = new_origin
5585
5586        # At this stage I need a georef and points
5587        # the points are relative to the georef
5588        geo_ref.write_NetCDF(outfile)
5589
5590        x =  points[:,0]
5591        y =  points[:,1]
5592        z = outfile.variables['z'][:]
5593   
5594        if verbose:
5595            print '------------------------------------------------'
5596            print 'More Statistics:'
5597            print '  Extent (/lon):'
5598            print '    x in [%f, %f], len(lat) == %d'\
5599                  %(min(x), max(x),
5600                    len(x))
5601            print '    y in [%f, %f], len(lon) == %d'\
5602                  %(min(y), max(y),
5603                    len(y))
5604            print '    z in [%f, %f], len(z) == %d'\
5605                  %(min(elevation), max(elevation),
5606                    len(elevation))
5607            print 'geo_ref: ',geo_ref
5608            print '------------------------------------------------'
5609           
5610        #z = resize(bath_grid,outfile.variables['z'][:].shape)
5611        #print "points[:,0]", points[:,0]
5612        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
5613        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
5614        outfile.variables['z'][:] = elevation
5615        outfile.variables['elevation'][:] = elevation  #FIXME HACK4
5616
5617        q = 'elevation'
5618        # This updates the _range values
5619        outfile.variables[q+Write_sts.RANGE][0] = min(elevation)
5620        outfile.variables[q+Write_sts.RANGE][1] = max(elevation)
5621
5622    def store_quantities(self, outfile, sts_precision=Float32,
5623                         slice_index=None, time=None,
5624                         verbose=False, **quant):
5625       
5626        """
5627        Write the quantity info.
5628
5629        **quant is extra keyword arguments passed in. These must be
5630          the sts quantities, currently; stage.
5631       
5632        if the time array has already been built, use the slice_index
5633        to specify the index.
5634       
5635        Otherwise, use time to increase the time dimension
5636
5637        Maybe make this general, but the viewer assumes these quantities,
5638        so maybe we don't want it general - unless the viewer is general
5639       
5640        precondition:
5641            triangulation and
5642            header have been called.
5643        """
5644        if time is not None:
5645            file_time = outfile.variables['time']
5646            slice_index = len(file_time)
5647            file_time[slice_index] = time   
5648
5649        # Write the conserved quantities from Domain.
5650        # Typically stage,  xmomentum, ymomentum
5651        # other quantities will be ignored, silently.
5652        # Also write the ranges: stage_range
5653        for q in Write_sts.sts_quantities:
5654            if not quant.has_key(q):
5655                msg = 'STS file can not write quantity %s' %q
5656                raise NewQuantity, msg
5657            else:
5658                q_values = quant[q]
5659                outfile.variables[q][slice_index] = \
5660                                q_values.astype(sts_precision)
5661
5662                # This updates the _range values
5663                q_range = outfile.variables[q+Write_sts.RANGE][:]
5664                q_values_min = min(q_values)
5665                if q_values_min < q_range[0]:
5666                    outfile.variables[q+Write_sts.RANGE][0] = q_values_min
5667                q_values_max = max(q_values)
5668                if q_values_max > q_range[1]:
5669                    outfile.variables[q+Write_sts.RANGE][1] = q_values_max
5670
5671
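# A minimal sketch of the corresponding Write_sts workflow for point (gauge)
# data (all names and values are hypothetical; outfile is an open, writable
# NetCDFFile):
#
#     sts = Write_sts()
#     sts.store_header(outfile, times, number_of_points=len(points_utm))
#     sts.store_points(outfile, points_utm, elevation, zone=56)
#     sts.store_quantities(outfile, slice_index=0, stage=stage,
#                          xmomentum=xmomentum, ymomentum=ymomentum)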
5672   
[4271]5673class Urs_points:
5674    """
5675    Read the info in URS mux files.
[4268]5676
[4271]5677    for the quantities here's a correlation between the file names and
5678    what they mean;
5679    z-mux is height above sea level, m
5680    e-mux is velocity in the Eastern direction, m/s
[4280]5681    n-mux is velocity in the Northern direction, m/s
[4271]5682    """
[4268]5683    def __init__(self,urs_file):
[4271]5684        self.iterated = False
[4268]5685        columns = 3 # long, lat , depth
5686        mux_file = open(urs_file, 'rb')
5687       
5688        # Number of points/stations
5689        (self.points_num,)= unpack('i',mux_file.read(4))
5690       
5691        # nt, int - Number of time steps
5692        (self.time_step_count,)= unpack('i',mux_file.read(4))
[4280]5693        #print "self.time_step_count", self.time_step_count
[4268]5694        #dt, float - time step, seconds
5695        (self.time_step,) = unpack('f', mux_file.read(4))
[4280]5696        #print "self.time_step", self.time_step
[4268]5697        msg = "Bad data in the urs file."
5698        if self.points_num < 0:
5699            mux_file.close()
5700            raise ANUGAError, msg
5701        if self.time_step_count < 0:
5702            mux_file.close()
5703            raise ANUGAError, msg
5704        if self.time_step < 0:
5705            mux_file.close()
5706            raise ANUGAError, msg
[4271]5707
5708        # The depth is in metres; it is the distance from the ocean
5709        # surface down to the sea bottom.
[4268]5710        lonlatdep = p_array.array('f')
5711        lonlatdep.read(mux_file, columns * self.points_num)
5712        lonlatdep = array(lonlatdep, typecode=Float)   
5713        lonlatdep = reshape(lonlatdep, (self.points_num, columns))
5714        #print 'lonlatdep',lonlatdep
5715        self.lonlatdep = lonlatdep
[4271]5716       
5717        self.mux_file = mux_file
[4268]5718        # check this array
5719
5720    def __iter__(self):
5721        """
[4271]5722        iterate over quantity data which is with respect to time.
5723
5724        Note: You can only iterate once over an object
5725       
5726        returns quantity information for each time slice
[4268]5727        """
[4271]5728        msg =  "You can only interate once over a urs file."
5729        assert not self.iterated, msg
[4280]5730        self.iter_time_step = 0
[4271]5731        self.iterated = True
[4268]5732        return self
5733   
5734    def next(self):
[4280]5735        if self.time_step_count == self.iter_time_step:
[4271]5736            self.close()
[4268]5737            raise StopIteration
5738        #Read in a time slice  from mux file 
5739        hz_p_array = p_array.array('f')
[4271]5740        hz_p_array.read(self.mux_file, self.points_num)
[4268]5741        hz_p = array(hz_p_array, typecode=Float)
[4280]5742        self.iter_time_step += 1
[4271]5743       
5744        return hz_p
[4268]5745
5746    def close(self):
5747        self.mux_file.close()
[4500]5748       
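# A hedged sketch of reading a mux file with Urs_points (the file name is
# hypothetical; each iteration yields one time slice of values, one value per
# point, and the file is closed when iteration finishes):
#
#     a_mux = Urs_points('o_waveheight-z-mux')
#     print a_mux.points_num, a_mux.time_step_count, a_mux.time_step
#     for hz in a_mux:        # hz is a Numeric array of length points_num
#         pass                # note: an Urs_points object can only be iterated once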
[4223]5749    #### END URS UNGRIDDED 2 SWW ###
5750
[4500]5751       
[5070]5752def start_screen_catcher(dir_name=None, myid='', numprocs='', extra_info='',
[4924]5753                         verbose=True):
[5070]5754    """
[4500]5755    Used to store screen output and errors to file, if run on multiple
5756    processes eachprocessor will have its own output and error file.
5757   
5758    extra_info - is used as a string that can identify outputs with another
5759    string eg. '_other'
[4850]5760   
5761    FIXME: Would be good if you could suppress all the screen output and
[5070]5762    only save it to file... however it seems a bit tricky as this capture
[4850]5763    technique responds to sys.stdout and by this time it is already printed out.
[5070]5764    """
[4850]5765   
[4500]5766    import sys
[5070]5767#    dir_name = dir_name
5768    if dir_name == None:
5769        dir_name=getcwd()
5770       
[4500]5771    if access(dir_name,W_OK) == 0:
[4850]5772        if verbose: print 'Making directory %s' %dir_name
5773      #  if verbose: print "myid", myid
[4500]5774        mkdir (dir_name,0777)
[5070]5775
[4500]5776    if myid <>'':
5777        myid = '_'+str(myid)
5778    if numprocs <>'':
5779        numprocs = '_'+str(numprocs)
5780    if extra_info <>'':
5781        extra_info = '_'+str(extra_info)
[4924]5782#    print 'hello1'
[5070]5783    screen_output_name = join(dir_name, "screen_output%s%s%s.txt" %(myid,
[4665]5784                                                                numprocs,
[5070]5785                                                                extra_info))
5786    screen_error_name = join(dir_name,  "screen_error%s%s%s.txt" %(myid,
[4665]5787                                                              numprocs,
[5070]5788                                                              extra_info))
[4924]5789
5790    if verbose: print 'Starting ScreenCatcher, all output will be stored in %s' \
5791                                     %(screen_output_name)
[4500]5792    #used to catch screen output to file
5793    sys.stdout = Screen_Catcher(screen_output_name)
5794    sys.stderr = Screen_Catcher(screen_error_name)
5795
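# A hedged usage sketch (the directory name is hypothetical):
#
#     start_screen_catcher('screen_output_dir', myid=0, numprocs=4)
#     print 'this now goes to screen_output_dir/screen_output_0_4.txt'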
5796class Screen_Catcher:
5797    """this simply catches the screen output and stores it to file defined by
5798    start_screen_catcher (above)
5799    """
5800   
5801    def __init__(self, filename):
5802        self.filename = filename
[4924]5803#        print 'init'
[4500]5804        if exists(self.filename) is True:
5805            print 'Old existing file "%s" has been deleted' %(self.filename)
5806            remove(self.filename)
5807
5808    def write(self, stuff):
5809        fid = open(self.filename, 'a')
5810        fid.write(stuff)
[4924]5811        fid.close()
5812       
[5391]5813# FIXME (DSG): Add unit test, make general, not just 2 files,
5814# but any number of files.
[4850]5815def copy_code_files(dir_name, filename1, filename2=None):
[4500]5816    """Copies "filename1" and "filename2" to "dir_name". Very useful for
5817    information management
5818    filename1 and filename2 are both absolute pathnames   
5819    """
5820
5821    if access(dir_name,F_OK) == 0:
5822        print 'Make directory %s' %dir_name
5823        mkdir (dir_name,0777)
5824    shutil.copy(filename1, dir_name + sep + basename(filename1))
[4850]5825    if filename2!=None:
5826        shutil.copy(filename2, dir_name + sep + basename(filename2))
5827        print 'Files %s and %s copied' %(filename1, filename2)
5828    else:
5829        print 'File %s copied' %(filename1)
[4500]5830
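# A hedged usage sketch (both file paths are hypothetical absolute pathnames):
#
#     copy_code_files('results/run_one', '/home/user/run_model.py',
#                     '/home/user/project.py')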
5831def get_data_from_file(filename,separator_value = ','):
5832    """
5833    Read in data information from a delimited text file.
5834   
5835    Returns:
5836        header_fields, a list of the fields in the first line,
5837        split on the 'separator_value'
5838       
5839        data, an array (N data columns X M lines) in the file
5840        excluding the header
5841       
5842    NOTE: won't deal with columns of different lengths and there must be
5843    no blank lines at the end.
5844    """
5845   
5846    fid = open(filename)
5847    lines = fid.readlines()
5848   
5849    fid.close()
5850   
5851    header_line = lines[0]
5852    header_fields = header_line.split(separator_value)
5853
5854    #array to store data, number in there is to allow float...
5855    #i'm sure there is a better way!
5856    data=array([],typecode=Float)
5857    data=resize(data,((len(lines)-1),len(header_fields)))
5858#    print 'number of fields',range(len(header_fields))
5859#    print 'number of lines',len(lines), shape(data)
5860#    print'data',data[1,1],header_line
5861
5862    array_number = 0
5863    line_number = 1
5864    while line_number < (len(lines)):
5865        for i in range(len(header_fields)): 
5866            #this get line below the header, explaining the +1
5867            #and also the line_number can be used as the array index
5868            fields = lines[line_number].split(separator_value)
5869            #assign to array
5870            data[array_number,i] = float(fields[i])
5871           
5872        line_number = line_number +1
5873        array_number = array_number +1
5874       
5875    return header_fields, data
5876
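# A hedged usage sketch, assuming a small comma separated file 'gauges.csv'
# whose first line is a header and whose remaining lines are all numeric:
#
#     header_fields, data = get_data_from_file('gauges.csv')
#     # header_fields is the first line split on ',' and data[i,j] is the
#     # value in row i+1, column j of the file (as a Float).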
5877def store_parameters(verbose=False,**kwargs):
5878    """
[4519]5879    Store "kwargs" into a temp csv file, if "completed" is a kwargs csv file is
5880    kwargs[file_name] else it is kwargs[output_dir] + details_temp.csv
5881   
[4500]5882    Must have a file_name keyword arg, this is what is writing to.
5883    might be a better way to do this using CSV module Writer and writeDict
5884   
[4665]5885    writes file to "output_dir" unless "completed" is in kwargs, then
5886    it writes to "file_name" kwargs
[4519]5887
[4500]5888    """
5889    import types
[4595]5890#    import os
[4500]5891   
5892    # Check that kwargs is a dictionary
5893    if type(kwargs) != types.DictType:
5894        raise TypeError
5895   
[4519]5896    #is 'completed' in kwargs?
[4500]5897    try:
5898        kwargs['completed']
5899        completed=True
5900    except:
5901        completed=False
5902 
5903    #get file name and removes from dict and assert that a file_name exists
5904    if completed:
5905        try:
[4519]5906            file = str(kwargs['file_name'])
[4500]5907        except:
5908            raise 'kwargs must have file_name'
5909    else:
[4519]5910        #write temp file in output directory
[4500]5911        try:
[4519]5912            file = str(kwargs['output_dir'])+'detail_temp.csv'
[4500]5913        except:
5914            raise 'kwargs must have output_dir'
5915       
5916    #extracts the header info and the new line info
5917    line=''
5918    header=''
5919    count=0
5920    keys = kwargs.keys()
5921    keys.sort()
5922   
5923    #used the sorted keys to create the header and line data
5924    for k in keys:
[4519]5925#        print "%s = %s" %(k, kwargs[k])
[4500]5926        header = header+str(k)
5927        line = line+str(kwargs[k])
5928        count+=1
5929        if count <len(kwargs):
5930            header = header+','
5931            line = line+','
[4519]5932    header+='\n'
5933    line+='\n'
[4500]5934
    # Check the header info; if it is the same, append, otherwise create a new file
    try:
        fid = open(file, "r")
        file_header = fid.readline()
        fid.close()
        if verbose: print 'read file header %s' %file_header

    except IOError:
        msg = 'could not open file %s - will try to create it' %file
        if verbose: print msg
        # Try to create the file; maybe the directory is bad
        try:
            fid = open(file, "w")
            fid.write(header)
            fid.close()
            file_header = header
        except IOError:
            msg = 'cannot create new file %s' %file
            raise IOError, msg
5955           
    # If the header is the same (or this is a new file), append the new line
    if file_header == str(header):
        fid = open(file, "a")
        fid.write(line)
        fid.close()
    else:
        # Backup plan: if the header is different, append header and line to
        # the end of detail_temp.csv in the output directory
        file = str(kwargs['output_dir'])+'detail_temp.csv'
        fid = open(file, "a")
        fid.write(header)
        fid.write(line)
        fid.close()

        if verbose:
            print 'file header: %s' %file_header.strip('\n')
            print 'new header:  %s' %header.strip('\n')
        msg = 'WARNING: File header does not match input info; the input '
        msg += 'variables have changed. Consider using a new file name.'
        print msg
5976
5977
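# Illustrative usage sketch only (not part of the original API): how
# store_parameters might be called from a run script.  Keyword names other
# than 'output_dir', 'file_name' and 'completed' are arbitrary assumptions.
def _example_store_parameters_usage():
    """Write one row of run metadata, then mark the run as completed."""
    # While the run is in progress, rows go to output_dir + 'detail_temp.csv'
    store_parameters(verbose=False,
                     output_dir='results/',
                     run_name='test_run',
                     cell_size=10.0)

    # With 'completed', the row goes to the named csv file instead
    store_parameters(verbose=False,
                     output_dir='results/',
                     file_name='results/run_summary.csv',
                     run_name='test_run',
                     cell_size=10.0,
                     completed=True)
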
[4551]5978
[5226]5979# ----------------------------------------------
5980# Functions to obtain diagnostics from sww files
5981#-----------------------------------------------
[4551]5982
[5276]5983def get_mesh_and_quantities_from_file(filename,
5984                                      quantities=None,
5985                                      verbose=False):
    """Get and rebuild the mesh structure and associated quantities from an sww file.

    Input:
        filename - Name of sww file
        quantities - Names of quantities to load

    Output:
        mesh - instance of class Mesh
               (containing nodes, triangles and boundary)
        quantities - arrays with quantity values at each mesh node
        time - vector of stored timesteps
    """
[5276]5998 
5999    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.
6000   
[5226]6001    import types
[5276]6002    from Scientific.IO.NetCDF import NetCDFFile
6003    from shallow_water import Domain
6004    from Numeric import asarray, transpose, resize
6005    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
[5226]6006
[5276]6007    if verbose: print 'Reading from ', filename
6008    fid = NetCDFFile(filename, 'r')    # Open existing file for read
[5307]6009    time = fid.variables['time'][:]    # Time vector
[5276]6010    time += fid.starttime[0]
[5226]6011   
[5276]6012    # Get the variables as Numeric arrays
6013    x = fid.variables['x'][:]                   # x-coordinates of nodes
6014    y = fid.variables['y'][:]                   # y-coordinates of nodes
6015    elevation = fid.variables['elevation'][:]   # Elevation
6016    stage = fid.variables['stage'][:]           # Water level
6017    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
6018    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction
[5226]6019
6020
[5276]6021    # Mesh (nodes (Mx2), triangles (Nx3))
6022    nodes = concatenate( (x[:, NewAxis], y[:, NewAxis]), axis=1 )
6023    triangles = fid.variables['volumes'][:]
6024   
6025    # Get geo_reference
6026    try:
6027        geo_reference = Geo_reference(NetCDFObject=fid)
    except AttributeError:
        # Sww files don't have to have a geo_ref
6030        geo_reference = None
[5226]6031
[5276]6032    if verbose: print '    building mesh from sww file %s' %filename
6033    boundary = None
[5226]6034
[5276]6035    #FIXME (Peter Row): Should this be in mesh?
6036    if fid.smoothing != 'Yes':
6037        nodes = nodes.tolist()
6038        triangles = triangles.tolist()
6039        nodes, triangles, boundary=weed(nodes, triangles, boundary)
[5226]6040
6041
6042    try:
[5276]6043        mesh = Mesh(nodes, triangles, boundary,
6044                    geo_reference=geo_reference)
6045    except AssertionError, e:
6046        fid.close()
        msg = 'Mesh could not be created: %s' %e
6048        raise DataDomainError, msg
[5226]6049
6050
6051    quantities = {}
[5276]6052    quantities['elevation'] = elevation
6053    quantities['stage'] = stage   
6054    quantities['xmomentum'] = xmomentum
6055    quantities['ymomentum'] = ymomentum   
[5336]6056
6057    fid.close()
[5276]6058    return mesh, quantities, time
[5226]6059
6060
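# Small usage sketch (the sww file name is an assumption) illustrating what
# get_mesh_and_quantities_from_file returns.
def _example_mesh_and_quantities():
    mesh, quantities, time = get_mesh_and_quantities_from_file('runup.sww',
                                                               verbose=False)

    # mesh.nodes is an Mx2 coordinate array, mesh.triangles an Nx3 index array
    print 'Number of nodes:    ', len(mesh.nodes)
    print 'Number of triangles:', len(mesh.triangles)

    # stage, xmomentum and ymomentum have one row per stored timestep
    print 'Stored timesteps:   ', len(time)
    last_stage = quantities['stage'][-1, :]
    print 'Final stage range:  ', min(last_stage), max(last_stage)
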
6061
6062def get_flow_through_cross_section(filename,
6063                                   polyline,
6064                                   verbose=False):
[5276]6065    """Obtain flow (m^3/s) perpendicular to specified cross section.
[5226]6066
6067    Inputs:
6068        filename: Name of sww file
6069        polyline: Representation of desired cross section - it may contain multiple
[5288]6070                  sections allowing for complex shapes. Assume absolute UTM coordinates.
6071                  Format [[x0, y0], [x1, y1], ...]
[5226]6072
6073    Output:
[5288]6074        time: All stored times in sww file
[5276]6075        Q: Hydrograph of total flow across given segments for all stored times.
[5226]6076
    The normal flow is computed for each triangle intersected by the polyline and
    added up.  If multiple segments at different angles are specified, the normal
    flows may partially cancel each other.
[5226]6080
[5288]6081    The typical usage of this function would be to get flow through a channel,
6082    and the polyline would then be a cross section perpendicular to the flow.
6083
[5226]6084    """
6085
[5288]6086    from anuga.fit_interpolate.interpolate import Interpolation_function
6087
6088    quantity_names =['elevation',
6089                     'stage',
6090                     'xmomentum',
6091                     'ymomentum']
6092
[5226]6093    # Get mesh and quantities from sww file
[5276]6094    X = get_mesh_and_quantities_from_file(filename,
[5288]6095                                          quantities=quantity_names,
[5276]6096                                          verbose=verbose)
6097    mesh, quantities, time = X
[5226]6098
6099
[5288]6100    # Adjust polyline to mesh spatial origin
6101    polyline = mesh.geo_reference.get_relative(polyline)
[5226]6102   
6103    # Find all intersections and associated triangles.
[5321]6104    segments = mesh.get_intersecting_segments(polyline, verbose=verbose)
6108
[5226]6109   
6110    # Then store for each triangle the length of the intersecting segment(s),
[5288]6111    # right hand normal(s) and midpoints as interpolation_points.
6112    interpolation_points = []
6113    for segment in segments:
6114        midpoint = sum(array(segment.segment))/2
6115        interpolation_points.append(midpoint)
[5226]6116
[5288]6117
[5321]6118    if verbose:
6119        print 'Interpolating - ',       
6120        print 'total number of interpolation points = %d'\
6121              %len(interpolation_points)
6122
[5288]6123    I = Interpolation_function(time,
6124                               quantities,
6125                               quantity_names=quantity_names,
6126                               vertex_coordinates=mesh.nodes,
6127                               triangles=mesh.triangles,
6128                               interpolation_points=interpolation_points,
6129                               verbose=verbose)
6130
[5321]6131    if verbose: print 'Computing hydrograph'
[5288]6132    # Compute hydrograph
6133    Q = []
6134    for t in time:
6135        total_flow=0
6136        for i, p in enumerate(interpolation_points):
6137            elevation, stage, uh, vh = I(t, point_id=i)
6138            normal = segments[i].normal
6139
6140            # Inner product of momentum vector with segment normal [m^2/s]
6141            normal_momentum = uh*normal[0] + vh*normal[1] 
6142
6143            # Flow across this segment [m^3/s]
6144            segment_flow = normal_momentum*segments[i].length
6145
6146            # Accumulate
6147            total_flow += segment_flow
6148             
6149
6150        # Store flow at this timestep   
6151        Q.append(total_flow)
6152   
6153
6154
[5276]6155    return time, Q
6156   
[5226]6157
6158
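# Usage sketch only - the sww file name and cross-section coordinates are
# assumptions for illustration.  The polyline must be in absolute UTM
# coordinates, as stated in the docstring above.
def _example_flow_through_cross_section():
    # Two-point cross section across an assumed channel
    cross_section = [[434200.0, 6432300.0],
                     [434200.0, 6432500.0]]

    time, Q = get_flow_through_cross_section('channel_flow.sww',
                                             cross_section,
                                             verbose=False)

    # Report the peak flow and when it occurred
    peak = max(Q)
    i = Q.index(peak)
    print 'Peak flow of %.2f m^3/s at time %.1f s' %(peak, time[i])
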
[5276]6159
6160
[4554]6161def get_maximum_inundation_elevation(filename,
6162                                     polygon=None,
6163                                     time_interval=None,
6164                                     verbose=False):
6165   
6166    """Return highest elevation where depth > 0
6167   
6168    Usage:
6169    max_runup = get_maximum_inundation_elevation(filename,
6170                                                 polygon=None,
6171                                                 time_interval=None,
6172                                                 verbose=False)
6173
    filename is a NetCDF sww file containing ANUGA model output.
    Optional arguments polygon and time_interval restrict the maximum runup
    calculation to points that lie within the specified polygon and time
    interval.
6178
6179    If no inundation is found within polygon and time_interval the return value
6180    is None signifying "No Runup" or "Everything is dry".
6181
6182    See general function get_maximum_inundation_data for details.
6183   
6184    """
6185   
6186    runup, _ = get_maximum_inundation_data(filename,
6187                                           polygon=polygon,
6188                                           time_interval=time_interval,
6189                                           verbose=verbose)
6190    return runup
6191
6192
6193
6194
6195def get_maximum_inundation_location(filename,
6196                                    polygon=None,
6197                                    time_interval=None,
6198                                    verbose=False):
6199    """Return location of highest elevation where h > 0
6200   
6201   
6202    Usage:
6203    max_runup_location = get_maximum_inundation_location(filename,
6204                                                         polygon=None,
6205                                                         time_interval=None,
6206                                                         verbose=False)
6207
6208    filename is a NetCDF sww file containing ANUGA model output.
    Optional arguments polygon and time_interval restrict the maximum runup
    calculation to points that lie within the specified polygon and time
    interval.
6212
6213    If no inundation is found within polygon and time_interval the return value
6214    is None signifying "No Runup" or "Everything is dry".
6215
6216    See general function get_maximum_inundation_data for details.
6217    """
6218   
6219    _, max_loc = get_maximum_inundation_data(filename,
6220                                             polygon=polygon,
6221                                             time_interval=time_interval,
6222                                             verbose=verbose)
6223    return max_loc
6224   
6225
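# Sketch (not part of the original API) combining the two convenience wrappers
# above to report both the maximum runup and where it occurred.  The sww file
# name is an assumption.
def _example_report_maximum_runup():
    swwfile = 'simulation.sww'
    runup = get_maximum_inundation_elevation(swwfile)
    location = get_maximum_inundation_location(swwfile)

    if runup is None:
        print 'No inundation found - everything stayed dry'
    else:
        print 'Maximum runup of %.2f m occurred at E=%.1f, N=%.1f'\
              %(runup, location[0], location[1])
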
6226
6227def get_maximum_inundation_data(filename, polygon=None, time_interval=None,
[4567]6228                                use_centroid_values=False,
[4554]6229                                verbose=False):
[4551]6230    """Compute maximum run up height from sww file.
6231
[4554]6232
6233    Usage:
6234    runup, location = get_maximum_inundation_data(filename,
6235                                                  polygon=None,
6236                                                  time_interval=None,
6237                                                  verbose=False)
6238   
6239
    The algorithm is as in get_maximum_inundation_elevation from
    shallow_water_domain, except that this function works with the sww file and
    computes the maximal runup height over multiple timesteps.

    Optional arguments polygon and time_interval restrict the maximum runup
    calculation to points that lie within the specified polygon and time
    interval. The polygon is assumed to be in (absolute) UTM coordinates in the
    same zone as the domain.
[4554]6250
6251    If no inundation is found within polygon and time_interval the return value
6252    is None signifying "No Runup" or "Everything is dry".
[4551]6253    """
6254
6255    # We are using nodal values here as that is what is stored in sww files.
6256
6257    # Water depth below which it is considered to be 0 in the model
6258    # FIXME (Ole): Allow this to be specified as a keyword argument as well
[4554]6259
6260    from anuga.utilities.polygon import inside_polygon   
[4551]6261    from anuga.config import minimum_allowed_height
[4595]6262    from Scientific.IO.NetCDF import NetCDFFile
[4551]6263
[4595]6264    dir, base = os.path.split(filename)
6265           
6266    iterate_over = get_all_swwfiles(dir,base)
[4551]6267   
6268    # Read sww file
6269    if verbose: 
[4554]6270        print 'Reading from %s' %filename
[4563]6271        # FIXME: Use general swwstats (when done)
[4551]6272   
[4595]6273    maximal_runup = None
6274    maximal_runup_location = None
[4563]6275   
[4595]6276    for file, swwfile in enumerate (iterate_over):
[4554]6277       
[4595]6278        # Read sww file
6279        filename = join(dir,swwfile+'.sww')
[4554]6280       
[4595]6281        if verbose: 
6282            print 'Reading from %s' %filename
6283            # FIXME: Use general swwstats (when done)
6284               
6285        fid = NetCDFFile(filename)
6286   
6287        # Get geo_reference
6288        # sww files don't have to have a geo_ref
6289        try:
6290            geo_reference = Geo_reference(NetCDFObject=fid)
6291        except AttributeError, e:
6292            geo_reference = Geo_reference() # Default georef object
6293           
6294        xllcorner = geo_reference.get_xllcorner()
6295        yllcorner = geo_reference.get_yllcorner()
6296        zone = geo_reference.get_zone()
[4554]6297       
[4595]6298        # Get extent
6299        volumes = fid.variables['volumes'][:]   
6300        x = fid.variables['x'][:] + xllcorner
6301        y = fid.variables['y'][:] + yllcorner
[4551]6302   
[4595]6303   
        # Get the relevant quantities (convert from single precision)
6305        elevation = array(fid.variables['elevation'][:], Float) 
6306        stage = array(fid.variables['stage'][:], Float)
[4595]6307   
6308   
[4665]6309        # Here's where one could convert nodal information to centroid
6310        # information
[4595]6311        # but is probably something we need to write in C.
6312        # Here's a Python thought which is NOT finished!!!
[4567]6313        if use_centroid_values is True:
[4595]6314            x = get_centroid_values(x, volumes)
6315            y = get_centroid_values(y, volumes)   
6316            elevation = get_centroid_values(elevation, volumes)   
6317   
6318   
6319        # Spatial restriction
6320        if polygon is not None:
6321            msg = 'polygon must be a sequence of points.'
6322            assert len(polygon[0]) == 2, msg
[4665]6323            # FIXME (Ole): Make a generic polygon input check in polygon.py
6324            # and call it here
[4595]6325           
6326            points = concatenate((x[:,NewAxis], y[:,NewAxis]), axis=1)
6327   
6328            point_indices = inside_polygon(points, polygon)
6329   
6330            # Restrict quantities to polygon
6331            elevation = take(elevation, point_indices)
6332            stage = take(stage, point_indices, axis=1)
6333   
6334            # Get info for location of maximal runup
6335            points_in_polygon = take(points, point_indices)
6336            x = points_in_polygon[:,0]
6337            y = points_in_polygon[:,1]       
[4567]6338        else:
[4595]6339            # Take all points
6340            point_indices = arange(len(x))
[4567]6341           
[4551]6342   
[4595]6343        # Temporal restriction
6344        time = fid.variables['time'][:]
6345        all_timeindices = arange(len(time))       
6346        if time_interval is not None:
6347           
6348            msg = 'time_interval must be a sequence of length 2.'
6349            assert len(time_interval) == 2, msg
[4665]6350            msg = 'time_interval %s must not be decreasing.' %(time_interval)
[4595]6351            assert time_interval[1] >= time_interval[0], msg
6352           
6353            msg = 'Specified time interval [%.8f:%.8f]' %tuple(time_interval)
            msg += ' does not match the model time interval: [%.8f, %.8f]\n'\
                   %(time[0], time[-1])
6356            if time_interval[1] < time[0]: raise ValueError(msg)
6357            if time_interval[0] > time[-1]: raise ValueError(msg)
6358   
6359            # Take time indices corresponding to interval (& is bitwise AND)
6360            timesteps = compress((time_interval[0] <= time) & (time <= time_interval[1]),
6361                                 all_timeindices)
6362   
6363   
6364            msg = 'time_interval %s did not include any model timesteps.' %(time_interval)       
6365            assert not alltrue(timesteps == 0), msg
6366   
6367   
6368        else:
6369            # Take them all
6370            timesteps = all_timeindices
6371       
6372   
6373        fid.close()
6374   
6375        # Compute maximal runup for each timestep
6380       
6381        for i in timesteps:
6382            if use_centroid_values is True:
[4665]6383                stage_i = get_centroid_values(stage[i,:], volumes)   
[4595]6384            else:
6385                stage_i = stage[i,:]
6386               
6387            depth = stage_i  - elevation
6388       
6389            # Get wet nodes i.e. nodes with depth>0 within given region and timesteps
6390            wet_nodes = compress(depth > minimum_allowed_height, arange(len(depth)))
6391   
6392            if alltrue(wet_nodes == 0):
6393                runup = None
6394            else:   
6395                # Find maximum elevation among wet nodes
6396                wet_elevation = take(elevation, wet_nodes)
6397   
6398                runup_index = argmax(wet_elevation)
6399                runup = max(wet_elevation)
[4862]6400                assert wet_elevation[runup_index] == runup # Must be True
6403           
            if runup > maximal_runup:
                maximal_runup = runup      # This works even if maximal_runup is None
6407   
6408                # Record location
6409                wet_x = take(x, wet_nodes)
6410                wet_y = take(y, wet_nodes)           
6411                maximal_runup_location = [wet_x[runup_index], wet_y[runup_index]]
6412   
[4554]6414    return maximal_runup, maximal_runup_location
[4551]6415
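# Usage sketch only - the polygon vertices, time interval and file name are
# illustrative assumptions.  The polygon is in absolute UTM coordinates and the
# time interval is in seconds as stored in the sww file.
def _example_restricted_runup():
    area_of_interest = [[422000.0, 874000.0],
                        [422500.0, 874000.0],
                        [422500.0, 874500.0],
                        [422000.0, 874500.0]]

    runup, location = get_maximum_inundation_data('simulation.sww',
                                                   polygon=area_of_interest,
                                                   time_interval=[3600.0, 7200.0],
                                                   verbose=False)
    print 'Maximum runup %s at location %s' %(runup, location)
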
[4586]6416def get_all_swwfiles(look_in_dir='',base_name='',verbose=False):
6417    '''
    Finds all the sww files in "look_in_dir" whose names contain "base_name".
    base_name may be given with or without the ".sww" extension.
[4586]6420   
6421    Returns: a list of strings
[4595]6422       
6423    Usage:     iterate_over = get_all_swwfiles(dir, name)
6424    then
6425               for swwfile in iterate_over:
6426                   do stuff
6427                   
6428    Check "export_grids" and "get_maximum_inundation_data" for examples
[4586]6429    '''
[4595]6430   
    # Split off and check the extension
    name, extension = os.path.splitext(base_name)

    if extension != '' and extension != '.sww':
        msg = 'file %s must be a NetCDF sww file!' %base_name
        raise IOError, msg
6437
[4586]6438    if look_in_dir == "":
6439        look_in_dir = "." # Unix compatibility
6440   
6441    dir_ls = os.listdir(look_in_dir)
[4595]6443    iterate_over = [x[:-4] for x in dir_ls if name in x and x[-4:] == '.sww']
[4586]6444    if len(iterate_over) == 0:
        msg = 'No sww files with the base name %s found in %s'\
              %(name, look_in_dir)
[4586]6447        raise IOError, msg
6448    if verbose: print 'iterate over %s' %(iterate_over)
[4554]6449
[4586]6450    return iterate_over
6451
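# Sketch of the iteration pattern described in the docstring above; the
# directory and base name are assumptions.
def _example_iterate_over_swwfiles():
    from os.path import join

    look_in_dir = 'output'
    base_name = 'scenario'

    for swwfile in get_all_swwfiles(look_in_dir, base_name):
        # Each entry is the file name without the '.sww' extension
        filename = join(look_in_dir, swwfile + '.sww')
        print 'Would process', filename
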
[4636]6452def get_all_files_with_extension(look_in_dir='',base_name='',extension='.sww',verbose=False):
6453    '''
    Finds all the files with the given extension in "look_in_dir" whose names
    contain "base_name".

    Returns: a list of strings

    Usage:     iterate_over = get_all_files_with_extension(dir, name, '.csv')
    then
               for filename in iterate_over:
                   do stuff

    Check "export_grids" and "get_maximum_inundation_data" for examples
6465    '''
6466   
    # Split off and check the extension
    name, ext = os.path.splitext(base_name)

    if ext != '' and ext != extension:
        msg = 'base_name %s must be a file with the %s extension!'\
              %(base_name, extension)
        raise IOError, msg
[4598]6474
[4636]6475    if look_in_dir == "":
6476        look_in_dir = "." # Unix compatibility
    dir_ls = os.listdir(look_in_dir)
6480    iterate_over = [x[:-4] for x in dir_ls if name in x and x[-4:] == extension]
6481    if len(iterate_over) == 0:
6482        msg = 'No files of the base name %s in %s'\
6483              %(name, look_in_dir)
6484        raise IOError, msg
6485    if verbose: print 'iterate over %s' %(iterate_over)
6486
6487    return iterate_over
6488
6489def get_all_directories_with_name(look_in_dir='',base_name='',verbose=False):
6490    '''
    Finds all the directories in "look_in_dir" whose names contain "base_name".

    Returns: a list of strings

    Usage:     iterate_over = get_all_directories_with_name(dir, name)
    then
               for dirname in iterate_over:
                   do stuff

    Check "export_grids" and "get_maximum_inundation_data" for examples
6502    '''
6503   
6504    #plus tests the extension
    if look_in_dir == "":
        look_in_dir = "." # Unix compatibility

    dir_ls = os.listdir(look_in_dir)
6512    if len(iterate_over) == 0:
6513        msg = 'No files of the base name %s'\
        msg = 'No directories with the base name %s'\
              %(base_name)
6516    if verbose: print 'iterate over %s' %(iterate_over)
6517
6518    return iterate_over
6519
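# Sketch showing the two generic helpers above side by side; the directory
# names, base names and the '.csv' extension are illustrative assumptions.
def _example_find_files_and_directories():
    # All csv files in 'output' whose names contain 'gauge'
    for name in get_all_files_with_extension('output', 'gauge', '.csv'):
        print 'csv file: %s.csv' %name

    # All directories under the current directory whose names contain 'run'
    for dirname in get_all_directories_with_name('.', 'run'):
        print 'directory:', dirname
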
[5189]6520def points2polygon(points_file,
6521                    minimum_triangle_angle=3.0):
6522    """
6523    WARNING: This function is not fully working. 
6524   
    Return a polygon derived from the alpha shape of the points in a points file.

    WARNING: The alpha shape may consist of multiple polygons, but this function
    only returns one of them.
6528   
6529    """
6530    from anuga.pmesh.mesh import Mesh, importMeshFromFile
6531    from anuga.shallow_water import Domain   
6532    from anuga.pmesh.mesh_interface import create_mesh_from_regions
6533   
6534    mesh = importMeshFromFile(points_file)
6535    mesh.auto_segment()
6536    mesh.exportASCIIsegmentoutlinefile("outline.tsh")
6537    mesh2 = importMeshFromFile("outline.tsh")
    mesh2.generate_mesh(maximum_triangle_area=1000000000,
                        minimum_triangle_angle=minimum_triangle_angle,
                        verbose=False)
6539    mesh2.export_mesh_file('outline_meshed.tsh')
6540    domain = Domain("outline_meshed.tsh", use_cache = False)
6541    polygon =  domain.get_boundary_polygon()
6542    return polygon
[4636]6543
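# Usage sketch only (note the warnings in the docstring above); the points file
# name is an assumption.  The function writes 'outline.tsh' and
# 'outline_meshed.tsh' into the current working directory as side effects.
def _example_points2polygon():
    polygon = points2polygon('field_survey.pts', minimum_triangle_angle=3.0)
    print 'Alpha-shape polygon has %d vertices' %len(polygon)
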
[3292]6544#-------------------------------------------------------------
[4500]6545if __name__ == "__main__":
    # Set umask from config to force the same permissions on all files and
    # directories created. (It was noticed that "mpirun" doesn't honour the
    # umask set in your .bashrc etc file.)
6549    from config import umask
6550    import os 
6551    os.umask(umask)