source: anuga_core/source/anuga/shallow_water/data_manager_joaquims_patch.py @ 5421

Last change on this file since 5421 was 4814, checked in by ole, 16 years ago

Added separate file with patch suggested by Joaquim Luis.
When we have a test for this, it can be incorporated into the
real data_manager.

File size: 188.7 KB
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.csv: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using abstract_2d_finite_volumes
48
49"""

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass
class NewQuantity(exceptions.Exception): pass



import csv
import os, sys
import shutil
from struct import unpack
import array as p_array
#import time, os
from os import sep, path, remove, mkdir, access, F_OK, W_OK, getcwd


from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around, reshape, transpose, sort, \
     NewAxis, ArrayType, compress, take, arange, argmax, alltrue, shape

import string

from Scientific.IO.NetCDF import NetCDFFile
#from shutil import copy
from os.path import exists, basename, join
from os import getcwd


from anuga.coordinate_transforms.redfearn import redfearn, \
     convert_from_latlon_to_utm
from anuga.coordinate_transforms.geo_reference import Geo_reference, \
     write_NetCDF_georeference, ensure_geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data,\
     ensure_absolute
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.config import max_float
from anuga.utilities.numerical_tools import ensure_numeric, mean
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError
from anuga.shallow_water import Domain
from anuga.abstract_2d_finite_volumes.pmesh2domain import \
     pmesh_to_domain_instance
from anuga.abstract_2d_finite_volumes.util import get_revision_number, \
     remove_lone_verts, sww2timeseries, get_centroid_values
from anuga.load_mesh.loadASCII import export_mesh_file

# Formula mappings

quantity_formula = {'momentum':'(xmomentum**2 + ymomentum**2)**0.5',
                    'depth':'stage-elevation',
                    'speed': \
 '(xmomentum**2 + ymomentum**2)**0.5/(stage-elevation+1.e-6/(stage-elevation))'}



def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s

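# For example (a doctest-style illustration):
#
#     >>> make_filename(' Perth (metric) grid ')
#     'Perth_metric_grid'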

def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return path



def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)


# ANOTHER OPTION, IF NEEDED IN THE FUTURE, Nick B 7/2007
def rmgeneric(path, __func__, verbose=False):
    ERROR_STR = """Error removing %(path)s, %(error)s """

    try:
        __func__(path)
        if verbose: print 'Removed ', path
    except OSError, (errno, strerror):
        print ERROR_STR % {'path' : path, 'error': strerror }

def removeall(path, verbose=False):

    if not os.path.isdir(path):
        return

    files = os.listdir(path)

    for x in files:
        fullpath = os.path.join(path, x)
        if os.path.isfile(fullpath):
            f = os.remove
            rmgeneric(fullpath, f, verbose)
        elif os.path.isdir(fullpath):
            removeall(fullpath, verbose)
            f = os.rmdir
            rmgeneric(fullpath, f, verbose)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from anuga.config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN

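# For example, assuming directory 'data' exists and a Unix path separator:
#
#     >>> create_filename('data', 'run', 'sww', size=4, time=1.5)
#     'data/run_size4_time1.50.sww'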

def get_files(datadir, filename, format, size):
    """Get all files with given name, size and format
    """

    import glob

    import os
    #from anuga.config import data_dir

    dir = check_dir(datadir)

    # Match filenames as produced by create_filename above
    # (check_dir already returns a trailing separator)
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
                                        '''   'w' (write)'''+\
                                        '''   'r' (read)''' +\
                                        '''   'a' (append)'''

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.domain = domain



        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)




        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'
            self.writer = Write_sww()
            self.writer.store_header(fid,
                                     domain.starttime,
                                     self.number_of_volumes,
                                     self.domain.number_of_full_nodes,
                                     description=description,
                                     smoothing=domain.smooth,
                                     order=domain.default_order)

            # Extra optional information
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            if domain.quantities_to_be_monitored is not None:
                fid.createDimension('singleton', 1)
                fid.createDimension('two', 2)

                poly = domain.monitor_polygon
                if poly is not None:
                    N = len(poly)
                    fid.createDimension('polygon_length', N)
                    fid.createVariable('extrema.polygon',
                                       self.precision,
                                       ('polygon_length',
                                        'two'))
                    fid.variables['extrema.polygon'][:] = poly


                interval = domain.monitor_time_interval
                if interval is not None:
                    fid.createVariable('extrema.time_interval',
                                       self.precision,
                                       ('two',))
                    fid.variables['extrema.time_interval'][:] = interval


                for q in domain.quantities_to_be_monitored:
                    #print 'doing', q
                    fid.createVariable(q+'.extrema', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q+'.min_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q+'.max_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q+'.min_time', self.precision,
                                       ('singleton',))
                    fid.createVariable(q+'.max_time', self.precision,
                                       ('singleton',))


        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)

        # Combine X and Y into point coordinates
        points = concatenate( (X[:,NewAxis],Y[:,NewAxis]), axis=1 )
        self.writer.store_triangulation(fid,
                                        points,
                                        V.astype(volumes.typecode()),
                                        Z,
                                        points_georeference= \
                                        domain.geo_reference)

        # Close
        fid.close()


    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose

        # Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') # Open existing file
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        # Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            # In order to get the file name and start time correct,
            # I change the domain.filename and domain.starttime.
            # This is the only way to do this without changing
            # other modules (I think).

            # Write a filename addon that won't break Swollen's reader
            # (10.sww is bad)
            filename_ext = '_time_%s' %self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            # Remember the old filename, then give domain a
            # name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename+filename_ext)


            # Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            # Build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s' %file_size
                print '    saving file to %s' %next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)
            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            if 'stage' in names and 'xmomentum' in names and \
               'ymomentum' in names:

                # Get stage
                Q = domain.quantities['stage']
                A,_ = Q.get_vertex_values(xy = False,
                                          precision = self.precision)
                z = fid.variables['elevation']
                stage = choose(A-z[:] >= self.minimum_storable_height,
                               (z[:], A))

                # Get xmomentum
                Q = domain.quantities['xmomentum']
                xmomentum, _ = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                # Get ymomentum
                Q = domain.quantities['ymomentum']
                ymomentum, _ = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                # Write quantities to NetCDF
                self.writer.store_quantities(fid,
                                             time=self.domain.time,
                                             precision=self.precision,
                                             stage=stage,
                                             xmomentum=xmomentum,
                                             ymomentum=ymomentum)
            else:
                # This is producing an sww that is not standard.
                # Store time
                time[i] = self.domain.time

                for name in names:
                    # Get quantity
                    Q = domain.quantities[name]
                    A,V = Q.get_vertex_values(xy = False,
                                              precision = self.precision)

                    # FIXME: Make this general (see below)
                    if name == 'stage':
                        z = fid.variables['elevation']
                        A = choose(A-z[:] >= self.minimum_storable_height,
                                   (z[:], A))
                        stage[i,:] = A.astype(self.precision)
                    elif name == 'xmomentum':
                        xmomentum[i,:] = A.astype(self.precision)
                    elif name == 'ymomentum':
                        ymomentum[i,:] = A.astype(self.precision)

                    #As in....
                    #eval( name + '[i,:] = A.astype(self.precision)' )
                    #FIXME (Ole): But we need a UNIT test for that before
                    # refactoring



            # Update extrema if requested
            domain = self.domain
            if domain.quantities_to_be_monitored is not None:
                for q, info in domain.quantities_to_be_monitored.items():

                    if info['min'] is not None:
                        fid.variables[q + '.extrema'][0] = info['min']
                        fid.variables[q + '.min_location'][:] =\
                                        info['min_location']
                        fid.variables[q + '.min_time'][0] = info['min_time']

                    if info['max'] is not None:
                        fid.variables[q + '.extrema'][1] = info['max']
                        fid.variables[q + '.max_location'][:] =\
                                        info['max_location']
                        fid.variables[q + '.max_time'][0] = info['max_time']



            #Flush and close
            fid.sync()
            fid.close()

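# A hedged usage sketch: given an initialised shallow water Domain
# instance `domain`, the writer is typically driven like this
# (in practice domain.store_timestep does so internally):
#
#     sww = Data_format_sww(domain, mode='w')
#     sww.store_connectivity()      # static mesh and elevation
#     sww.store_timestep(['stage', 'xmomentum', 'ymomentum'])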


# Class for handling checkpoint data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'sww', mode) #FIXME: should extension be 'cpt'?


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity (volumes) of the
        triangles constituting the mesh.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #FIXME: There is no static 'z' variable in the checkpoint file;
        #elevation is stored per timestep like the other quantities.

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        quantity = fid.variables[name]  # Variable holding the named quantity
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision = self.precision)

        quantity[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()


#### NED is national exposure database (name changed to NEXIS)

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self, file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None, title_check_list=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.

        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad, the
        info to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.
        """
        self._file_name = file_name
        self._geospatial = None

        # self._attribute_dic is a dictionary.
        # The keys are the column titles.
        # The values are lists of column data.

        # self._title_index_dic is a dictionary.
        # The keys are the column titles.
        # The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
            csv2dict(self._file_name, title_check_list=title_check_list)
        try:
            #Have code here that handles caps or lower
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if is_x_y_locations != False:
                is_x_y_locations = True
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                 longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #FIXME: throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result != 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result != 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic, \
                    other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result != 0:
                    return result
            return 0
        else:
            return 1


    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' %column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column_name column, using
        known_values, to return a value of the value_column_name column.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) != value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' %column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name, 'wb')
        writer = csv.writer(fd)

        #Write the title to a csv file
        line = [None]* len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]] = title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]* len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]] = \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

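# A hedged usage sketch, assuming 'exposure.csv' has LATITUDE and LONGITUDE
# columns plus a hypothetical DEPTH column (values come back as strings):
#
#     exposure = Exposure_csv('exposure.csv')
#     depths = [float(x) for x in exposure.get_column('DEPTH')]
#     exposure.set_column('DEPTH_TIMES_2', [2*d for d in depths])
#     exposure.save('exposure_out.csv')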

def csv2dict(file_name, title_check_list=None):
    """
    Load the csv file as a dictionary: title as key and column info as value.
    Also create a second dictionary, title as key and column index as value,
    to keep track of the column order.

    Two dictionaries are returned.

    WARNING: Values are returned as strings.
    Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]
    """

    attribute_dic = {}
    title_index_dic = {}
    titles_stripped = [] # list of titles
    reader = csv.reader(file(file_name))

    # Read in and manipulate the title info
    titles = reader.next()
    for i, title in enumerate(titles):
        titles_stripped.append(title.strip())
        title_index_dic[title.strip()] = i
    title_count = len(titles_stripped)
    #print "title_index_dic",title_index_dic
    if title_check_list is not None:
        for title_check in title_check_list:
            #assert title_index_dic.has_key(title_check), msg
            if not title_index_dic.has_key(title_check):
                #reader.close()
                msg = "Reading error. This column is not present: %s" \
                      %title_check
                raise IOError, msg



    #create a dic of column values, indexed by column title
    for line in reader:
        if len(line) != title_count:
            raise IOError #FIXME make this nicer
        for i, value in enumerate(line):
            attribute_dic.setdefault(titles_stripped[i],[]).append(value)

    return attribute_dic, title_index_dic
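
# For example, given a file 'gauges.csv' containing:
#
#     name,easting,northing
#     g1,1000.0,2000.0
#     g2,1500.0,2100.0
#
# csv2dict('gauges.csv') returns
#
#     ({'name': ['g1', 'g2'],
#       'easting': ['1000.0', '1500.0'],
#       'northing': ['2000.0', '2100.0']},
#      {'name': 0, 'easting': 1, 'northing': 2})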


#Auxiliary
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format).
       Arrays are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each row correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N == 3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))

    outfile.close()

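# For example, writing a single triangle (Numeric is imported at the top
# of this module):
#
#     from Numeric import array, Float
#     x = array([[0.0, 1.0, 0.0]], Float)
#     y = array([[0.0, 0.0, 1.0]], Float)
#     z = array([[0.0, 0.0, 0.0]], Float)
#     write_obj('triangle', x, y, z)   # creates triangle.obj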

#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN, xx, yy, zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN, xx, yy, zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float, tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry', x, y, z)


    #Now read all the data files with variable information, combine with
    #x,y info
    #and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        # Paths returned by glob already include data_dir
        lines = open(filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Hmmmm'



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float, tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t, x, y, z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read NetCDF filename1, pick timesteps first:step:last and save to
    NetCDF file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

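# For example, keeping every second timestep of the first 100:
#
#     filter_netcdf('run.sww', 'run_filtered.sww', first=0, last=100, step=2)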

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)




def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

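# For example, converting 'topography.dem' to 'topography.pts' clipped to
# a bounding box (the coordinates here are illustrative only):
#
#     dem2pts('topography',
#             easting_min=722500, easting_max=725000,
#             northing_min=5893500, northing_max=5895000,
#             use_cache=True, verbose=True)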
1283def _dem2pts(basename_in, basename_out=None, verbose=False,
1284            easting_min=None, easting_max=None,
1285            northing_min=None, northing_max=None):
1286    """Read Digitial Elevation model from the following NetCDF format (.dem)
1287
1288    Internal function. See public function dem2pts for details.
1289    """
1290
1291    # FIXME: Can this be written feasibly using write_pts?
1292
1293    import os
1294    from Scientific.IO.NetCDF import NetCDFFile
1295    from Numeric import Float, zeros, reshape, sum
1296
1297    root = basename_in
1298
1299    # Get NetCDF
1300    infile = NetCDFFile(root + '.dem', 'r')  # Open existing netcdf file for read
1301
1302    if verbose: print 'Reading DEM from %s' %(root + '.dem')
1303
1304    ncols = infile.ncols[0]
1305    nrows = infile.nrows[0]
1306    xllcorner = infile.xllcorner[0]  # Easting of lower left corner
1307    yllcorner = infile.yllcorner[0]  # Northing of lower left corner
1308    cellsize = infile.cellsize[0]
1309    NODATA_value = infile.NODATA_value[0]
1310    dem_elevation = infile.variables['elevation']
1311
1312    zone = infile.zone[0]
1313    false_easting = infile.false_easting[0]
1314    false_northing = infile.false_northing[0]
1315
1316    # Text strings
1317    projection = infile.projection
1318    datum = infile.datum
1319    units = infile.units
1320
1321
1322    # Get output file
1323    if basename_out == None:
1324        ptsname = root + '.pts'
1325    else:
1326        ptsname = basename_out + '.pts'
1327
1328    if verbose: print 'Store to NetCDF file %s' %ptsname
1329    # NetCDF file definition
1330    outfile = NetCDFFile(ptsname, 'w')
1331
1332    # Create new file
1333    outfile.institution = 'Geoscience Australia'
1334    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
1335                          'of spatial point data'
1336    # Assign default values
1337    xllcorner = xllcorner + 0.5*cellsize # Convert to gridline registration
1338    yllcorner = yllcorner + 0.5*cellsize
1339
1340    if easting_min is None: easting_min = xllcorner
1341    if easting_max is None: easting_max = xllcorner + (ncols-1)*cellsize
1342    if northing_min is None: northing_min = yllcorner
1343    if northing_max is None: northing_max = yllcorner + (nrows-1)*cellsize
1344
1345    # Compute offsets to update georeferencing
1346    easting_offset = xllcorner - easting_min
1347    northing_offset = yllcorner - northing_min
1348
1349    # Georeferencing
1350    outfile.zone = zone
1351    outfile.xllcorner = easting_min # Easting of lower left corner
1352    outfile.yllcorner = northing_min # Northing of lower left corner
1353    outfile.false_easting = false_easting
1354    outfile.false_northing = false_northing
1355
1356    outfile.projection = projection
1357    outfile.datum = datum
1358    outfile.units = units
1359
1360
1361    # Grid info (FIXME: probably not going to be used, but heck)
1362    outfile.ncols = ncols
1363    outfile.nrows = nrows
1364
1365    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
1366    totalnopoints = nrows*ncols
1367
1368    # Calculating number of NODATA_values for each row in clipped region
1369    # FIXME: use array operations to do faster
1370    nn = 0
1371    k = 0
1372    i1_0 = 0
1373    j1_0 = 0
1374    thisj = 0
1375    thisi = 0
1376    for i in range(nrows):
1377        y = (nrows-i-1)*cellsize + yllcorner
1378        for j in range(ncols):
1379            x = j*cellsize + xllcorner
1380            if easting_min <= x <= easting_max and \
1381               northing_min <= y <= northing_max:
1382                thisj = j
1383                thisi = i
1384                if dem_elevation_r[i,j] == NODATA_value: nn += 1
1385
1386                if k == 0:
1387                    i1_0 = i
1388                    j1_0 = j
1389                k += 1
1390
1391    index1 = j1_0
1392    index2 = thisj
1393
1394    # Dimension definitions
1395    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
1396    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
1397
1398    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
1399    nopoints = clippednopoints-nn
1400
1401    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]
1402
1403    if verbose:
1404        print 'There are %d values in the elevation' %totalnopoints
1405        print 'There are %d values in the clipped elevation' %clippednopoints
1406        print 'There are %d NODATA_values in the clipped elevation' %nn
1407
1408    outfile.createDimension('number_of_points', nopoints)
1409    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1410
1411    # Variable definitions
1412    outfile.createVariable('points', Float, ('number_of_points',
1413                                             'number_of_dimensions'))
1414    outfile.createVariable('elevation', Float, ('number_of_points',))
1415
1416    # Get handles to the variables
1417    points = outfile.variables['points']
1418    elevation = outfile.variables['elevation']
1419
1420    lenv = index2-index1+1
1421    # Store data
1422    global_index = 0
1423    # for i in range(nrows):
1424    for i in range(i1_0,thisi+1,1):
1425        if verbose and i%((nrows+10)/10)==0:
1426            print 'Processing row %d of %d' %(i, nrows)
1427
1428        lower_index = global_index
1429
1430        v = dem_elevation_r[i,index1:index2+1]
1431        no_NODATA = sum(v == NODATA_value)
1432        if no_NODATA > 0:
1433            newcols = lenv - no_NODATA # ncols_in_bounding_box - no_NODATA
1434        else:
1435            newcols = lenv # ncols_in_bounding_box
1436
1437        telev = zeros(newcols, Float)
1438        tpoints = zeros((newcols, 2), Float)
1439
1440        local_index = 0
1441
1442        y = (nrows-i-1)*cellsize + yllcorner
1443        #for j in range(ncols):
1444        for j in range(j1_0,index2+1,1):
1445
1446            x = j*cellsize + xllcorner
1447            if easting_min <= x <= easting_max and \
1448               northing_min <= y <= northing_max and \
1449               dem_elevation_r[i,j] <> NODATA_value:
1450                tpoints[local_index, :] = [x-easting_min,y-northing_min]
1451                telev[local_index] = dem_elevation_r[i, j]
1452                global_index += 1
1453                local_index += 1
1454
1455        upper_index = global_index
1456
1457        if upper_index == lower_index + newcols:
1458            points[lower_index:upper_index, :] = tpoints
1459            elevation[lower_index:upper_index] = telev
1460
1461    assert global_index == nopoints, 'index not equal to number of points'
1462
1463    infile.close()
1464    outfile.close()
1465
1466
1467
1468def _read_hecras_cross_sections(lines):
1469    """Return block of surface lines for each cross section
1470    Starts with SURFACE LINE,
1471    Ends with END CROSS-SECTION
1472    """
1473
1474    points = []
1475
1476    reading_surface = False
1477    for i, line in enumerate(lines):
1478
1479        if len(line.strip()) == 0:    #Ignore blanks
1480            continue
1481
1482        if lines[i].strip().startswith('SURFACE LINE'):
1483            reading_surface = True
1484            continue
1485
1486        if lines[i].strip().startswith('END') and reading_surface:
1487            yield points
1488            reading_surface = False
1489            points = []
1490
1491        if reading_surface:
1492            fields = line.strip().split(',')
1493            easting = float(fields[0])
1494            northing = float(fields[1])
1495            elevation = float(fields[2])
1496            points.append([easting, northing, elevation])
1497
1498
1499
1500
1501def hecras_cross_sections2pts(basename_in,
1502                              basename_out=None,
1503                              verbose=False):
1504    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1505
1506    Example:
1507
1508
1509# RAS export file created on Mon 15Aug2005 11:42
1510# by HEC-RAS Version 3.1.1
1511
1512BEGIN HEADER:
1513  UNITS: METRIC
1514  DTM TYPE: TIN
1515  DTM: v:\1\cit\perth_topo\river_tin
1516  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1517  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1518  MAP PROJECTION: UTM
1519  PROJECTION ZONE: 50
1520  DATUM: AGD66
1521  VERTICAL DATUM:
1522  NUMBER OF REACHES:  19
1523  NUMBER OF CROSS-SECTIONS:  14206
1524END HEADER:
1525
1526
1527Only the SURFACE LINE data of the following form will be utilised
1528
1529  CROSS-SECTION:
1530    STREAM ID:Southern-Wungong
1531    REACH ID:Southern-Wungong
1532    STATION:19040.*
1533    CUT LINE:
1534      405548.671603161 , 6438142.7594925
1535      405734.536092045 , 6438326.10404912
1536      405745.130459356 , 6438331.48627354
1537      405813.89633823 , 6438368.6272789
1538    SURFACE LINE:
1539     405548.67,   6438142.76,   35.37
1540     405552.24,   6438146.28,   35.41
1541     405554.78,   6438148.78,   35.44
1542     405555.80,   6438149.79,   35.44
1543     405559.37,   6438153.31,   35.45
1544     405560.88,   6438154.81,   35.44
1545     405562.93,   6438156.83,   35.42
1546     405566.50,   6438160.35,   35.38
1547     405566.99,   6438160.83,   35.37
1548     ...
1549   END CROSS-SECTION
1550
1551    Convert to NetCDF pts format which is
1552
1553    points:  (Nx2) Float array
1554    elevation: N Float array
1555    """
1556
1557    import os
1558    from Scientific.IO.NetCDF import NetCDFFile
1559    from Numeric import Float, zeros, reshape
1560
1561    root = basename_in
1562
1563    #Get ASCII file
1564    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1565
1566    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1567
1568    lines = infile.readlines()
1569    infile.close()
1570
1571    if verbose: print 'Converting to pts format'
1572
1573    i = 0
1574    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1575        i += 1
1576
1577    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1578    i += 1
1579
1580    assert lines[i].strip().upper().startswith('UNITS:')
1581    units = lines[i].strip().split()[1]
1582    i += 1
1583
1584    assert lines[i].strip().upper().startswith('DTM TYPE:')
1585    i += 1
1586
1587    assert lines[i].strip().upper().startswith('DTM:')
1588    i += 1
1589
1590    assert lines[i].strip().upper().startswith('STREAM')
1591    i += 1
1592
1593    assert lines[i].strip().upper().startswith('CROSS')
1594    i += 1
1595
1596    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1597    projection = lines[i].strip().split(':')[1]
1598    i += 1
1599
1600    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1601    zone = int(lines[i].strip().split(':')[1])
1602    i += 1
1603
1604    assert lines[i].strip().upper().startswith('DATUM:')
1605    datum = lines[i].strip().split(':')[1]
1606    i += 1
1607
1608    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1609    i += 1
1610
1611    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1612    i += 1
1613
1614    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1615    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1616    i += 1
1617
1618
1619    #Now read all points
1620    points = []
1621    elevation = []
1622    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1623        for k, entry in enumerate(entries):
1624            points.append(entry[:2])
1625            elevation.append(entry[2])
1626
1627
1628    msg = 'Actual number of cross-sections == %d; reported as %d'\
1629          %(j+1, number_of_cross_sections)
1630    assert j+1 == number_of_cross_sections, msg
1631
1632    #Get output file
1633    if basename_out is None:
1634        ptsname = root + '.pts'
1635    else:
1636        ptsname = basename_out + '.pts'
1637
1638    geo_ref = Geo_reference(zone, 0, 0, datum, projection, units)
1639    geo = Geospatial_data(points, {"elevation":elevation},
1640                          verbose=verbose, geo_reference=geo_ref)
1641    geo.export_points_file(ptsname)
1642
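
# Minimal usage sketch (hypothetical file names; assumes the converter above
# is exposed as hecras_cross_sections2pts, as in ANUGA's data_manager --
# adjust the name if this module differs): convert an SDF file and read the
# resulting .pts file back to check the point count.
def _example_hecras2pts():
    hecras_cross_sections2pts('up_can_xs3d', verbose=True)

    G = Geospatial_data(file_name='up_can_xs3d.pts')
    print 'Converted %d surface line points' %len(G.get_data_points())
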
1643def export_grid(basename_in, extra_name_out = None,
1644                quantities = None, # defaults to elevation
1645                timestep = None,
1646                reduction = None,
1647                cellsize = 10,
1648                NODATA_value = -9999,
1649                easting_min = None,
1650                easting_max = None,
1651                northing_min = None,
1652                northing_max = None,
1653                verbose = False,
1654                origin = None,
1655                datum = 'WGS84',
1656                format = 'ers'):
1657    """
1658   
1659    Wrapper for sww2dem. - see sww2dem to find out what most of the
1660    parameters do.
1661
1662    Quantities is a list of quantities.  Each quantity will be
1663    calculated for each sww file.
1664
1665    This returns the basenames of the files returned, which is made up
1666    of the dir and all of the file name, except the extension.
1667
1668    This function returns the names of the files produced.
1669
1670    It will also produce as many output files as there are input sww files.
1671    """
1672   
1673    if quantities is None:
1674        quantities = ['elevation']
1675       
1676    if type(quantities) is str:
1677        quantities = [quantities]
1678
1679    # How many sww files are there?
1680    dir, base = os.path.split(basename_in)
1681
1682    if dir == "":
1683        dir = "." # Unix compatibility
1684
1685    iterate_over = get_all_swwfiles(dir, base, verbose)
1686
1689    files_out = []
1691    for sww_file in iterate_over:
1692        for quantity in quantities:
1693            if extra_name_out is None:
1694                basename_out = sww_file + '_' + quantity
1695            else:
1696                basename_out = sww_file + '_' + quantity + '_' \
1697                               + extra_name_out
1699       
1700            file_out = sww2dem(dir+sep+sww_file, dir+sep+basename_out,
1701                               quantity, 
1702                               timestep,
1703                               reduction,
1704                               cellsize,
1705                               NODATA_value,
1706                               easting_min,
1707                               easting_max,
1708                               northing_min,
1709                               northing_max,
1710                               verbose,
1711                               origin,
1712                               datum,
1713                               format)
1714            files_out.append(file_out)
1716    return files_out
1717
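
# Minimal usage sketch (hypothetical basename 'perth'): convert every sww
# file matching 'perth*.sww' in the current directory to ArcView ASCII grids
# of elevation at 25 m resolution. Expressions such as 'stage-elevation' are
# also accepted as quantities.
def _example_export_grid():
    files = export_grid('perth',
                        quantities=['elevation'],
                        reduction=max,
                        cellsize=25,
                        format='asc',
                        verbose=True)
    for name in files:
        print 'Wrote %s.asc' %name
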
1718
1719def get_timeseries(production_dirs, output_dir, scenario_name, gauges_dir_name,
1720                   plot_quantity, generate_fig = False,
1721                   reportname = None, surface = False, time_min = None,
1722                   time_max = None, title_on = False, verbose = True,
1723                   nodes=None):
1724    """
1725    nodes - number of processes used.
1726
1727    warning - this function has no tests
1728    """
1729    if reportname is None:
1730        report = False
1731    else:
1732        report = True
1733       
1734    if nodes is None:
1735        is_parallel = False
1736    else:
1737        is_parallel = True
1738       
1739    # Generate figures
1740    swwfiles = {}
1741   
1742    if is_parallel is True:
1743        base_reportname = reportname
1744        for i in range(nodes):
1745            print 'Sending node %d of %d' %(i, nodes)
1746            swwfiles = {}
1747            if reportname is not None: reportname = base_reportname + '_%s' %(i)
1748            for label_id in production_dirs.keys():
1749                if label_id == 'boundaries':
1750                    swwfile = best_boundary_sww
1751                else:
1752                    file_loc = output_dir + label_id + sep
1753                    sww_extra = '_P%s_%s' %(i,nodes)
1754                    swwfile = file_loc + scenario_name + sww_extra + '.sww'
1755                    print 'swwfile',swwfile
1756                swwfiles[swwfile] = label_id
1757
1758            texname, elev_output = sww2timeseries(swwfiles,
1759                                          gauges_dir_name,
1760                                          production_dirs,
1761                                          report = report,
1762                                          reportname = reportname,
1763                                          plot_quantity = plot_quantity,
1764                                          generate_fig = generate_fig,
1765                                          surface = surface,
1766                                          time_min = time_min,
1767                                          time_max = time_max,
1768                                          title_on = title_on,
1769                                          verbose = verbose)
1770    else:   
1771        for label_id in production_dirs.keys():       
1772            if label_id == 'boundaries':
1773                print 'boundaries'
1774                file_loc = project.boundaries_in_dir
1775                swwfile = project.boundaries_dir_name3 + '.sww'
1776                #  swwfile = boundary_dir_filename
1777            else:
1778                file_loc = output_dir + label_id + sep
1779                swwfile = file_loc + scenario_name + '.sww'
1780            swwfiles[swwfile] = label_id
1781       
1782        texname, elev_output = sww2timeseries(swwfiles,
1783                                              gauges_dir_name,
1784                                              production_dirs,
1785                                              report = report,
1786                                              reportname = reportname,
1787                                              plot_quantity = plot_quantity,
1788                                              generate_fig = generate_fig,
1789                                              surface = surface,
1790                                              time_min = time_min,
1791                                              time_max = time_max,
1792                                              title_on = title_on,
1793                                              verbose = verbose)
1794                                         
1795
1796   
1797def sww2dem(basename_in, basename_out = None,
1798            quantity = None, # defaults to elevation
1799            timestep = None,
1800            reduction = None,
1801            cellsize = 10,
1802            NODATA_value = -9999,
1803            easting_min = None,
1804            easting_max = None,
1805            northing_min = None,
1806            northing_max = None,
1807            verbose = False,
1808            origin = None,
1809            datum = 'WGS84',
1810            format = 'ers'):
1811
1812    """Read SWW file and convert to Digitial Elevation model format
1813    (.asc or .ers)
1814
1815    Example (ASC):
1816
1817    ncols         3121
1818    nrows         1800
1819    xllcorner     722000
1820    yllcorner     5893000
1821    cellsize      25
1822    NODATA_value  -9999
1823    138.3698 137.4194 136.5062 135.5558 ..........
1824
1825    Also write an accompanying file with same basename_out but extension .prj
1826    used to fix the UTM zone, datum, false northings and eastings.
1827
1828    The prj format is assumed to be as
1829
1830    Projection    UTM
1831    Zone          56
1832    Datum         WGS84
1833    Zunits        NO
1834    Units         METERS
1835    Spheroid      WGS84
1836    Xshift        0.0000000000
1837    Yshift        10000000.0000000000
1838    Parameters
1839
1840
1841    The parameter quantity must be the name of an existing quantity or
1842    an expression involving existing quantities. The default is
1843    'elevation'. Quantity is not a list of quantities.
1844
1845    if timestep (an index) is given, output quantity at that timestep
1846
1847    if reduction is given use that to reduce quantity over all timesteps.
1848
1849    datum is the geodetic datum written to the output header (default 'WGS84')
1850
1851    format can be either 'asc' or 'ers'
1852    """
1853
1854    import sys
1855    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, \
1856         sometrue
1857    from Numeric import array2string
1858
1859    from anuga.utilities.polygon import inside_polygon, outside_polygon, \
1860         separate_points_by_polygon
1861    from anuga.abstract_2d_finite_volumes.util import \
1862         apply_expression_to_dictionary
1863
1864    msg = 'Format must be either asc or ers'
1865    assert format.lower() in ['asc', 'ers'], msg
1866
1867
1868    false_easting = 500000
1869    false_northing = 10000000
1870
1871    if quantity is None:
1872        quantity = 'elevation'
1873       
1874    if reduction is None:
1875        reduction = max
1876
1877    if basename_out is None:
1878        basename_out = basename_in + '_%s' %quantity
1879
1880    if quantity_formula.has_key(quantity):
1881        quantity = quantity_formula[quantity]
1882       
1883    swwfile = basename_in + '.sww'
1884    demfile = basename_out + '.' + format
1885    # Note the use of a .ers extension is optional (write_ermapper_grid will
1886    # deal with either option)
1887   
1889   
1890    # Read sww file
1891    if verbose: 
1892        print 'Reading from %s' %swwfile
1893        print 'Output basename is %s' %basename_out
1894   
1895    from Scientific.IO.NetCDF import NetCDFFile
1896    fid = NetCDFFile(swwfile)
1897
1898    #Get extent and reference
1899    x = fid.variables['x'][:]
1900    y = fid.variables['y'][:]
1901    volumes = fid.variables['volumes'][:]
1902    if timestep is not None:
1903        times = fid.variables['time'][timestep]
1904    else:
1905        times = fid.variables['time'][:]
1906
1907    number_of_timesteps = fid.dimensions['number_of_timesteps']
1908    number_of_points = fid.dimensions['number_of_points']
1909   
1910    if origin is None:
1911
1912        # Get geo_reference
1913        # sww files don't have to have a geo_ref
1914        try:
1915            geo_reference = Geo_reference(NetCDFObject=fid)
1916        except AttributeError, e:
1917            geo_reference = Geo_reference() # Default georef object
1918
1919        xllcorner = geo_reference.get_xllcorner()
1920        yllcorner = geo_reference.get_yllcorner()
1921        zone = geo_reference.get_zone()
1922    else:
1923        zone = origin[0]
1924        xllcorner = origin[1]
1925        yllcorner = origin[2]
1926
1927
1928
1929    # FIXME: Refactor using code from Interpolation_function.statistics
1930    # (in interpolate.py)
1931    # Something like print swwstats(swwname)
1932    if verbose:
1933        print '------------------------------------------------'
1934        print 'Statistics of SWW file:'
1935        print '  Name: %s' %swwfile
1936        print '  Reference:'
1937        print '    Lower left corner: [%f, %f]'\
1938              %(xllcorner, yllcorner)
1939        if timestep is not None:
1940            print '    Time: %f' %(times)
1941        else:
1942            print '    Start time: %f' %fid.starttime[0]
1943        print '  Extent:'
1944        print '    x [m] in [%f, %f], len(x) == %d'\
1945              %(min(x.flat), max(x.flat), len(x.flat))
1946        print '    y [m] in [%f, %f], len(y) == %d'\
1947              %(min(y.flat), max(y.flat), len(y.flat))
1948        if timestep is not None:
1949            print '    t [s] = %f, len(t) == %d' %(times, 1)
1950        else:
1951            print '    t [s] in [%f, %f], len(t) == %d'\
1952              %(min(times), max(times), len(times))
1953        print '  Quantities [SI units]:'
1954        for name in ['stage', 'xmomentum', 'ymomentum']:
1955            q = fid.variables[name][:].flat
1956            if timestep is not None:
1957                q = q[timestep*len(x):(timestep+1)*len(x)]
1958            print '    %s in [%f, %f]' %(name, min(q), max(q))
1959        for name in ['elevation']:
1960            q = fid.variables[name][:].flat
1961            print '    %s in [%f, %f]' %(name, min(q), max(q))
1962           
1963    # Get quantity and reduce if applicable
1964    if verbose: print 'Processing quantity %s' %quantity
1965
1966    # Turn NetCDF objects into Numeric arrays
1967    quantity_dict = {}
1968    for name in fid.variables.keys():
1969        quantity_dict[name] = fid.variables[name][:] 
1970
1971
1972    # Convert quantity expression to quantities found in sww file   
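    # (e.g. quantity = 'stage-elevation' evaluates to water depth per point)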
1973    q = apply_expression_to_dictionary(quantity, quantity_dict)
1974
1975    if len(q.shape) == 2:
1976        #q has a time component and needs to be reduced along
1977        #the temporal dimension
1978        if verbose: print 'Reducing quantity %s' %quantity
1979        q_reduced = zeros( number_of_points, Float )
1980       
1981        if timestep is not None:
1982            for k in range(number_of_points):
1983                q_reduced[k] = q[timestep,k] 
1984        else:
1985            for k in range(number_of_points):
1986                q_reduced[k] = reduction( q[:,k] )
1987
1988        q = q_reduced
1989
1990    #Post condition: Now q has dimension: number_of_points
1991    assert len(q.shape) == 1
1992    assert q.shape[0] == number_of_points
1993
1994
1995    if verbose:
1996        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1997
1998
1999    #Create grid and update xll/yll corner and x,y
2000
2001    #Relative extent
2002    if easting_min is None:
2003        xmin = min(x)
2004    else:
2005        xmin = easting_min - xllcorner
2006
2007    if easting_max is None:
2008        xmax = max(x)
2009    else:
2010        xmax = easting_max - xllcorner
2011
2012    if northing_min is None:
2013        ymin = min(y)
2014    else:
2015        ymin = northing_min - yllcorner
2016
2017    if northing_max is None:
2018        ymax = max(y)
2019    else:
2020        ymax = northing_max - yllcorner
2021
2022
2023
2024    if verbose: print 'Creating grid'
2025    ncols = int((xmax-xmin)/cellsize)+1
2026    nrows = int((ymax-ymin)/cellsize)+1
2027
2028
2029    #New absolute reference and coordinates
2030    newxllcorner = xmin+xllcorner
2031    newyllcorner = ymin+yllcorner
2032
2033    x = x+xllcorner-newxllcorner
2034    y = y+yllcorner-newyllcorner
2035   
2036    vertex_points = concatenate((x[:, NewAxis], y[:, NewAxis]), axis=1)
2037    assert len(vertex_points.shape) == 2
2038
2039    grid_points = zeros((ncols*nrows, 2), Float)
2040
2041
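    # Fill grid_points in row-major order (k = i*ncols + j). For 'asc' the
    # first row is the southern edge of the grid; for 'ers' the y values are
    # flipped so the first row is the northern edge, matching ERMapper's
    # north-to-south row order.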
2042    for i in xrange(nrows):
2043        if format.lower() == 'asc':
2044            yg = i*cellsize
2045        else:
2046            # flip the order of the y values for ers
2047            yg = (nrows-i)*cellsize
2048
2049        for j in xrange(ncols):
2050            xg = j*cellsize
2051            k = i*ncols + j
2052
2053            grid_points[k,0] = xg
2054            grid_points[k,1] = yg
2055
2056    #Interpolate
2057    from anuga.fit_interpolate.interpolate import Interpolate
2058
2059    # Remove loners from vertex_points, volumes here
2060    vertex_points, volumes = remove_lone_verts(vertex_points, volumes)
2063    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2064
2065    #Interpolate using quantity values
2066    if verbose: print 'Interpolating'
2067    grid_values = interp.interpolate(q, grid_points).flat
2068
2069
2070    if verbose:
2071        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2072                                                      max(grid_values))
2073
2074    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2075    P = interp.mesh.get_boundary_polygon()
2076    outside_indices = outside_polygon(grid_points, P, closed=True)
2077
2078    for i in outside_indices:
2079        grid_values[i] = NODATA_value
2080
2081
2082
2083
2084    if format.lower() == 'ers':
2085        # setup ERS header information
2086        grid_values = reshape(grid_values,(nrows, ncols))
2087        header = {}
2088        header['datum'] = '"' + datum + '"'
2089        # FIXME The use of hardwired UTM and zone number needs to be made optional
2090        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2091        header['projection'] = '"UTM-' + str(zone) + '"'
2092        header['coordinatetype'] = 'EN'
2093        if header['coordinatetype'] == 'LL':
2094            header['longitude'] = str(newxllcorner)
2095            header['latitude'] = str(newyllcorner)
2096        elif header['coordinatetype'] == 'EN':
2097            header['eastings'] = str(newxllcorner)
2098            header['northings'] = str(newyllcorner)
2099        header['nullcellvalue'] = str(NODATA_value)
2100        header['xdimension'] = str(cellsize)
2101        header['ydimension'] = str(cellsize)
2102        header['value'] = '"' + quantity + '"'
2103        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2104
2105
2106        #Write
2107        if verbose: print 'Writing %s' %demfile
2108        import ermapper_grids
2109        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2110        fid.close()
2111        return basename_out
2112    else:
2113        #Write to Ascii format
2114
2115        #Write prj file
2116        prjfile = basename_out + '.prj'
2117
2118        if verbose: print 'Writing %s' %prjfile
2119        prjid = open(prjfile, 'w')
2120        prjid.write('Projection    %s\n' %'UTM')
2121        prjid.write('Zone          %d\n' %zone)
2122        prjid.write('Datum         %s\n' %datum)
2123        prjid.write('Zunits        NO\n')
2124        prjid.write('Units         METERS\n')
2125        prjid.write('Spheroid      %s\n' %datum)
2126        prjid.write('Xshift        %d\n' %false_easting)
2127        prjid.write('Yshift        %d\n' %false_northing)
2128        prjid.write('Parameters\n')
2129        prjid.close()
2130
2131
2132
2133        if verbose: print 'Writing %s' %demfile
2134
2135        ascid = open(demfile, 'w')
2136
2137        ascid.write('ncols         %d\n' %ncols)
2138        ascid.write('nrows         %d\n' %nrows)
2139        ascid.write('xllcorner     %d\n' %newxllcorner)
2140        ascid.write('yllcorner     %d\n' %newyllcorner)
2141        ascid.write('cellsize      %f\n' %cellsize)
2142        ascid.write('NODATA_value  %d\n' %NODATA_value)
2143
2144
2148
2149        for i in range(nrows):
2150            if verbose and i%((nrows+10)/10)==0:
2151                print 'Doing row %d of %d' %(i, nrows)
2152
2153            base_index = (nrows-i-1)*ncols
2154
2155            slice = grid_values[base_index:base_index+ncols]
2156            s = array2string(slice, max_line_width=sys.maxint)
2157            ascid.write(s[1:-1] + '\n')
2158
2159
2166
2167        #Close
2168        ascid.close()
2169        fid.close()
2170        return basename_out
2171
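
# Minimal usage sketch (hypothetical basename 'perth'): grid the maximum
# water depth over all timesteps as an ArcView ASCII grid, then the
# elevation at the first timestep as an ERMapper grid. Note that quantity
# may be an expression in the stored quantities, e.g. 'stage-elevation'.
def _example_sww2dem():
    sww2dem('perth', basename_out='perth_depth_max',
            quantity='stage-elevation',
            reduction=max,
            cellsize=25,
            format='asc',
            verbose=True)

    sww2dem('perth', basename_out='perth_elevation',
            quantity='elevation',
            timestep=0,
            cellsize=25,
            format='ers',
            verbose=True)
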
2172#Backwards compatibility
2173def sww2asc(basename_in, basename_out = None,
2174            quantity = None,
2175            timestep = None,
2176            reduction = None,
2177            cellsize = 10,
2178            verbose = False,
2179            origin = None):
2180    print 'sww2asc will soon be obsoleted - please use sww2dem'
2181    sww2dem(basename_in,
2182            basename_out = basename_out,
2183            quantity = quantity,
2184            timestep = timestep,
2185            reduction = reduction,
2186            cellsize = cellsize,
2187            verbose = verbose,
2188            origin = origin,
2189            datum = 'WGS84',
2190            format = 'asc')
2191
2192def sww2ers(basename_in, basename_out = None,
2193            quantity = None,
2194            timestep = None,
2195            reduction = None,
2196            cellsize = 10,
2197            verbose = False,
2198            origin = None,
2199            datum = 'WGS84'):
2200    print 'sww2ers will soon be obsoleted - please use sww2dem'
2201    sww2dem(basename_in,
2202            basename_out = basename_out,
2203            quantity = quantity,
2204            timestep = timestep,
2205            reduction = reduction,
2206            cellsize = cellsize,
2207            verbose = verbose,
2208            origin = origin,
2209            datum = datum,
2210            format = 'ers')
2211################################# END COMPATIBILITY ##############
2212
2213
2214
2215def sww2pts(basename_in, basename_out=None,
2216            data_points=None,
2217            quantity=None,
2218            timestep=None,
2219            reduction=None,
2220            NODATA_value=-9999,
2221            verbose=False,
2222            origin=None):
2224
2225
2226    """Read SWW file and convert to interpolated values at selected points
2227
2228    The parameter 'quantity' must be the name of an existing quantity or
2229    an expression involving existing quantities. The default is
2230    'elevation'.
2231
2232    if timestep (an index) is given, output quantity at that timestep
2233
2234    if reduction is given use that to reduce quantity over all timesteps.
2235
2236    data_points (Nx2 array) gives the locations where the quantity is to be computed; these are matched directly against the x,y coordinates stored in the sww file.
2237   
2238    """
2239
2240    import sys
2241    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2242    from Numeric import array2string
2243
2244    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2245    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
2246
2247    from anuga.geospatial_data.geospatial_data import Geospatial_data
2248
2249    if quantity is None:
2250        quantity = 'elevation'
2251
2252    if reduction is None:
2253        reduction = max
2254
2255    if basename_out is None:
2256        basename_out = basename_in + '_%s' %quantity
2257
2258    swwfile = basename_in + '.sww'
2259    ptsfile = basename_out + '.pts'
2260
2261    # Read sww file
2262    if verbose: print 'Reading from %s' %swwfile
2263    from Scientific.IO.NetCDF import NetCDFFile
2264    fid = NetCDFFile(swwfile)
2265
2266    # Get extent and reference
2267    x = fid.variables['x'][:]
2268    y = fid.variables['y'][:]
2269    volumes = fid.variables['volumes'][:]
2270
2271    number_of_timesteps = fid.dimensions['number_of_timesteps']
2272    number_of_points = fid.dimensions['number_of_points']
2273    if origin is None:
2274
2275        # Get geo_reference
2276        # sww files don't have to have a geo_ref
2277        try:
2278            geo_reference = Geo_reference(NetCDFObject=fid)
2279        except AttributeError, e:
2280            geo_reference = Geo_reference() #Default georef object
2281
2282        xllcorner = geo_reference.get_xllcorner()
2283        yllcorner = geo_reference.get_yllcorner()
2284        zone = geo_reference.get_zone()
2285    else:
2286        zone = origin[0]
2287        xllcorner = origin[1]
2288        yllcorner = origin[2]
2289
2290
2291
2292    # FIXME: Refactor using code from file_function.statistics
2293    # Something like print swwstats(swwname)
2294    if verbose:
2295        x = fid.variables['x'][:]
2296        y = fid.variables['y'][:]
2297        times = fid.variables['time'][:]
2298        print '------------------------------------------------'
2299        print 'Statistics of SWW file:'
2300        print '  Name: %s' %swwfile
2301        print '  Reference:'
2302        print '    Lower left corner: [%f, %f]'\
2303              %(xllcorner, yllcorner)
2304        print '    Start time: %f' %fid.starttime[0]
2305        print '  Extent:'
2306        print '    x [m] in [%f, %f], len(x) == %d'\
2307              %(min(x.flat), max(x.flat), len(x.flat))
2308        print '    y [m] in [%f, %f], len(y) == %d'\
2309              %(min(y.flat), max(y.flat), len(y.flat))
2310        print '    t [s] in [%f, %f], len(t) == %d'\
2311              %(min(times), max(times), len(times))
2312        print '  Quantities [SI units]:'
2313        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2314            q = fid.variables[name][:].flat
2315            print '    %s in [%f, %f]' %(name, min(q), max(q))
2316
2317
2318
2319    # Get quantity and reduce if applicable
2320    if verbose: print 'Processing quantity %s' %quantity
2321
2322    # Turn NetCDF objects into Numeric arrays
2323    quantity_dict = {}
2324    for name in fid.variables.keys():
2325        quantity_dict[name] = fid.variables[name][:]
2326
2327
2328
2329    # Convert quantity expression to quantities found in sww file   
2330    q = apply_expression_to_dictionary(quantity, quantity_dict)
2331
2332
2333
2334    if len(q.shape) == 2:
2335        # q has a time component and needs to be reduced along
2336        # the temporal dimension
2337        if verbose: print 'Reducing quantity %s' %quantity
2338        q_reduced = zeros( number_of_points, Float )
2339
2340        for k in range(number_of_points):
2341            q_reduced[k] = reduction( q[:,k] )
2342
2343        q = q_reduced
2344
2345    # Post condition: Now q has dimension: number_of_points
2346    assert len(q.shape) == 1
2347    assert q.shape[0] == number_of_points
2348
2349
2350    if verbose:
2351        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2352
2353
2354    # Create grid and update xll/yll corner and x,y
2355    vertex_points = concatenate((x[:, NewAxis], y[:, NewAxis]), axis=1)
2356    assert len(vertex_points.shape) == 2
2357
2358    # Interpolate
2359    from anuga.fit_interpolate.interpolate import Interpolate
2360    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2361
2362    # Interpolate using quantity values
2363    if verbose: print 'Interpolating'
2364    interpolated_values = interp.interpolate(q, data_points).flat
2365
2366
2367    if verbose:
2368        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2369                                                      max(interpolated_values))
2370
2371    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2372    P = interp.mesh.get_boundary_polygon()
2373    outside_indices = outside_polygon(data_points, P, closed=True)
2374
2375    for i in outside_indices:
2376        interpolated_values[i] = NODATA_value
2377
2378
2379    # Store results   
2380    G = Geospatial_data(data_points=data_points,
2381                        attributes=interpolated_values)
2382
2383    G.export_points_file(ptsfile, absolute = True)
2384
2385    fid.close()
2386
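
# Minimal usage sketch (hypothetical coordinates, given in the sww file's
# stored x,y coordinate system since sww2pts applies no georeference
# adjustment to data_points): interpolate the maximum stage at two gauges.
def _example_sww2pts():
    from Numeric import array
    gauges = array([[1500.0, 2200.0],
                    [1800.0, 2500.0]])
    sww2pts('perth', basename_out='perth_stage_max',
            data_points=gauges,
            quantity='stage',
            reduction=max,
            verbose=True)
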
2387
2388def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2389                                  use_cache = False,
2390                                  verbose = False):
2391    """Read Digitial Elevation model from the following ASCII format (.asc)
2392
2393    Example:
2394
2395    ncols         3121
2396    nrows         1800
2397    xllcorner     722000
2398    yllcorner     5893000
2399    cellsize      25
2400    NODATA_value  -9999
2401    138.3698 137.4194 136.5062 135.5558 ..........
2402
2403    Convert basename_in + '.asc' to NetCDF format (.dem)
2404    mimicking the ASCII format closely.
2405
2406
2407    An accompanying file with same basename_in but extension .prj must exist
2408    and is used to fix the UTM zone, datum, false northings and eastings.
2409
2410    The prj format is assumed to be as
2411
2412    Projection    UTM
2413    Zone          56
2414    Datum         WGS84
2415    Zunits        NO
2416    Units         METERS
2417    Spheroid      WGS84
2418    Xshift        0.0000000000
2419    Yshift        10000000.0000000000
2420    Parameters
2421    """
2422
2423
2424
2425    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2426
2427    if use_cache is True:
2428        from caching import cache
2429        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2430                       dependencies = [basename_in + '.asc',
2431                                       basename_in + '.prj'],
2432                       verbose = verbose)
2433
2434    else:
2435        result = _convert_dem_from_ascii2netcdf(basename_in, **kwargs)
2436
2437    return result
2438
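# Minimal usage sketch (hypothetical basename 'perth'): with use_cache=True
# the conversion is cached and only re-run when 'perth.asc' or 'perth.prj'
# change on disk.
def _example_dem_conversion():
    convert_dem_from_ascii2netcdf('perth', use_cache=True, verbose=True)
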
2439
2440
2441
2442
2443
2444def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2445                                  verbose = False):
2446    """Read Digitial Elevation model from the following ASCII format (.asc)
2447
2448    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2449    """
2450
2451    import os
2452    from Scientific.IO.NetCDF import NetCDFFile
2453    from Numeric import Float, array
2454
2455    #root, ext = os.path.splitext(basename_in)
2456    root = basename_in
2457
2458    ###########################################
2459    # Read Meta data
2460    if verbose: print 'Reading METADATA from %s' %(root + '.prj')
2461    metadatafile = open(root + '.prj')
2462    metalines = metadatafile.readlines()
2463    metadatafile.close()
2464
2465    L = metalines[0].strip().split()
2466    assert L[0].strip().lower() == 'projection'
2467    projection = L[1].strip()                   #TEXT
2468
2469    L = metalines[1].strip().split()
2470    assert L[0].strip().lower() == 'zone'
2471    zone = int(L[1].strip())
2472
2473    L = metalines[2].strip().split()
2474    assert L[0].strip().lower() == 'datum'
2475    datum = L[1].strip()                        #TEXT
2476
2477    L = metalines[3].strip().split()
2478    assert L[0].strip().lower() == 'zunits'     #IGNORE
2479    zunits = L[1].strip()                       #TEXT
2480
2481    L = metalines[4].strip().split()
2482    assert L[0].strip().lower() == 'units'
2483    units = L[1].strip()                        #TEXT
2484
2485    L = metalines[5].strip().split()
2486    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2487    spheroid = L[1].strip()                     #TEXT
2488
2489    L = metalines[6].strip().split()
2490    assert L[0].strip().lower() == 'xshift'
2491    false_easting = float(L[1].strip())
2492
2493    L = metalines[7].strip().split()
2494    assert L[0].strip().lower() == 'yshift'
2495    false_northing = float(L[1].strip())
2496
2497    #print false_easting, false_northing, zone, datum
2498
2499
2500    ###########################################
2501    #Read DEM data
2502
2503    datafile = open(basename_in + '.asc')
2504
2505    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2506    lines = datafile.readlines()
2507    datafile.close()
2508
2509    if verbose: print 'Got %d lines' %len(lines)
2510
2511    ncols = int(lines[0].split()[1].strip())
2512    nrows = int(lines[1].split()[1].strip())
2513    xllcorner = float(lines[2].split()[1].strip())
2514    yllcorner = float(lines[3].split()[1].strip())
2515    cellsize = float(lines[4].split()[1].strip())
2516    NODATA_value = int(lines[5].split()[1].strip())
2517
2518    assert len(lines) == nrows + 6
2519
2520
2521    ##########################################
2522
2523
2524    if basename_out is None:
2525        netcdfname = root + '.dem'
2526    else:
2527        netcdfname = basename_out + '.dem'
2528
2529    if verbose: print 'Store to NetCDF file %s' %netcdfname
2530    # NetCDF file definition
2531    fid = NetCDFFile(netcdfname, 'w')
2532
2533    #Create new file
2534    fid.institution = 'Geoscience Australia'
2535    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2536                      'of spatial point data'
2537
2538    fid.ncols = ncols
2539    fid.nrows = nrows
2540    fid.xllcorner = xllcorner
2541    fid.yllcorner = yllcorner
2542    fid.cellsize = cellsize
2543    fid.NODATA_value = NODATA_value
2544
2545    fid.zone = zone
2546    fid.false_easting = false_easting
2547    fid.false_northing = false_northing
2548    fid.projection = projection
2549    fid.datum = datum
2550    fid.units = units
2551
2552
2553    # dimension definitions
2554    fid.createDimension('number_of_rows', nrows)
2555    fid.createDimension('number_of_columns', ncols)
2556
2557    # variable definitions
2558    fid.createVariable('elevation', Float, ('number_of_rows',
2559                                            'number_of_columns'))
2560
2561    # Get handles to the variables
2562    elevation = fid.variables['elevation']
2563
2564    #Store data
2565    n = len(lines[6:])
2566    for i, line in enumerate(lines[6:]):
2567        fields = line.split()
2568        if verbose and i%((n+10)/10)==0:
2569            print 'Processing row %d of %d' %(i, nrows)
2570
2571        elevation[i, :] = array([float(x) for x in fields])
2572
2573    fid.close()
2574
2575
2576
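# Minimal sketch (hypothetical basename): read back a .dem written above and
# print its grid metadata. Scalar NetCDF attributes are indexed with [0],
# following the convention used elsewhere in this module (cf. starttime[0]).
def _example_read_dem():
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile('perth.dem', 'r')
    print 'ncols = %d, nrows = %d' %(fid.ncols[0], fid.nrows[0])
    print 'cellsize = %f' %fid.cellsize[0]
    elevation = fid.variables['elevation'][:]
    print 'elevation in [%f, %f]' %(min(elevation.flat), max(elevation.flat))
    fid.close()
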
2577
2578
2579def ferret2sww(basename_in, basename_out = None,
2580               verbose = False,
2581               minlat = None, maxlat = None,
2582               minlon = None, maxlon = None,
2583               mint = None, maxt = None, mean_stage = 0,
2584               origin = None, zscale = 1,
2585               fail_on_NaN = True,
2586               NaN_filler = 0,
2587               elevation = None,
2588               inverted_bathymetry = True
2589               ): #FIXME: Bathymetry should be obtained
2590                                  #from MOST somehow.
2591                                  #Alternatively from elsewhere
2592                                  #or, as a last resort,
2593                                  #specified here.
2594                                  #The value of -100 will work
2595                                  #for the Wollongong tsunami
2596                                  #scenario but is very hacky
2597    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2598    sww format native to abstract_2d_finite_volumes.
2599
2600    Specify only basename_in and read files of the form
2601    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2602    relative height, x-velocity and y-velocity, respectively.
2603
2604    Also convert latitude and longitude to UTM. All coordinates are
2605    assumed to be given in the GDA94 datum.
2606
2607    min's and max's: If omitted, the full extent is used.
2608    To include a value, min may equal it, while max must exceed it.
2609    Lat and lon are assumed to be in decimal degrees.
2610
2611    origin is a 3-tuple with geo referenced
2612    UTM coordinates (zone, easting, northing)
2613
2614    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2615    which means that longitude is the fastest
2616    varying dimension (row major order, so to speak)
2617
2618    ferret2sww uses grid points as vertices in a triangular grid, counting
2619    along rows of longitude first, then upwards row by row (see the sketch after this function)
2620    """
2621
2622    import os
2623    from Scientific.IO.NetCDF import NetCDFFile
2624    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2625    from Numeric import allclose, around
2626
2627    precision = Float
2628
2629    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2630
2631    if minlat is not None:
2632        assert -90 < minlat < 90, msg
2633    if maxlat is not None:
2634        assert -90 < maxlat < 90, msg
2635        if minlat is not None:
2636            assert maxlat > minlat
2637    if minlon is not None:
2638        assert -180 < minlon < 180, msg
2639    if maxlon is not None:
2640        assert -180 < maxlon < 180, msg
2641        if minlon is not None:
2642            assert maxlon > minlon
2643       
2644
2645
2646    #Get NetCDF data
2647    if verbose: print 'Reading files %s_*.nc' %basename_in
2649    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2650    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2651    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2652    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2653
2654    if basename_out is None:
2655        swwname = basename_in + '.sww'
2656    else:
2657        swwname = basename_out + '.sww'
2658
2659    # Get dimensions of file_h
2660    for dimension in file_h.dimensions.keys():
2661        if dimension[:3] == 'LON':
2662            dim_h_longitude = dimension
2663        if dimension[:3] == 'LAT':
2664            dim_h_latitude = dimension
2665        if dimension[:4] == 'TIME':
2666            dim_h_time = dimension
2667
2671
2672    times = file_h.variables[dim_h_time]
2673    latitudes = file_h.variables[dim_h_latitude]
2674    longitudes = file_h.variables[dim_h_longitude]
2675   
2676    # get dimensions for file_e
2677    for dimension in file_e.dimensions.keys():
2678        if dimension[:3] == 'LON':
2679            dim_e_longitude = dimension
2680        if dimension[:3] == 'LAT':
2681            dim_e_latitude = dimension
2682
2683    # get dimensions for file_u
2684    for dimension in file_u.dimensions.keys():
2685        if dimension[:3] == 'LON':
2686            dim_u_longitude = dimension
2687        if dimension[:3] == 'LAT':
2688            dim_u_latitude = dimension
2689        if dimension[:4] == 'TIME':
2690            dim_u_time = dimension
2691           
2692    # get dimensions for file_v
2693    for dimension in file_v.dimensions.keys():
2694        if dimension[:3] == 'LON':
2695            dim_v_longitude = dimension
2696        if dimension[:3] == 'LAT':
2697            dim_v_latitude = dimension
2698        if dimension[:4] == 'TIME':
2699            dim_v_time = dimension
2700
2701
2702    # Precision used by MOST for lat/lon is 4 or 5 decimals
2703    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2704    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2705
2706    # Check that files are compatible
2707    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2708    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2709    assert allclose(latitudes, e_lat)
2710
2711    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2712    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2713    assert allclose(longitudes, e_lon)
2714
2715    if mint is None:
2716        jmin = 0
2717        mint = times[0]       
2718    else:
2719        jmin = searchsorted(times, mint)
2720       
2721    if maxt is None:
2722        jmax = len(times)
2723        maxt = times[-1]
2724    else:
2725        jmax = searchsorted(times, maxt)
2726
2729    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
2730                                                  longitudes[:],
2731                                                  minlat, maxlat,
2732                                                  minlon, maxlon)
2733
2734
2735    times = times[jmin:jmax]
2736    latitudes = latitudes[kmin:kmax]
2737    longitudes = longitudes[lmin:lmax]
2738
2741
2742    if verbose: print 'cropping'
2743    zname = 'ELEVATION'
2744
2745    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2746    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2747    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2748    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2749
2763
2764
2765
2766    #Get missing values
2767    nan_ha = file_h.variables['HA'].missing_value[0]
2768    nan_ua = file_u.variables['UA'].missing_value[0]
2769    nan_va = file_v.variables['VA'].missing_value[0]
2770    if hasattr(file_e.variables[zname],'missing_value'):
2771        nan_e  = file_e.variables[zname].missing_value[0]
2772    else:
2773        nan_e = None
2774
2775    #Cleanup
2776    from Numeric import sometrue
2777
2778    missing = (amplitudes == nan_ha)
2779    if sometrue (missing):
2780        if fail_on_NaN:
2781            msg = 'NetCDFFile %s contains missing values'\
2782                  %(basename_in+'_ha.nc')
2783            raise DataMissingValuesError, msg
2784        else:
2785            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
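            # Note: 'missing' is a 0/1 mask, so this keeps original values
            # where data is present and substitutes NaN_filler elsewhere
            # without an explicit loop. The same idiom is used below for
            # uspeed, vspeed and elevations.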
2786
2787    missing = (uspeed == nan_ua)
2788    if sometrue (missing):
2789        if fail_on_NaN:
2790            msg = 'NetCDFFile %s contains missing values'\
2791                  %(basename_in+'_ua.nc')
2792            raise DataMissingValuesError, msg
2793        else:
2794            uspeed = uspeed*(missing==0) + missing*NaN_filler
2795
2796    missing = (vspeed == nan_va)
2797    if sometrue (missing):
2798        if fail_on_NaN:
2799            msg = 'NetCDFFile %s contains missing values'\
2800                  %(basename_in+'_va.nc')
2801            raise DataMissingValuesError, msg
2802        else:
2803            vspeed = vspeed*(missing==0) + missing*NaN_filler
2804
2805
2806    missing = (elevations == nan_e)
2807    if sometrue (missing):
2808        if fail_on_NaN:
2809            msg = 'NetCDFFile %s contains missing values'\
2810                  %(basename_in+'_e.nc')
2811            raise DataMissingValuesError, msg
2812        else:
2813            elevations = elevations*(missing==0) + missing*NaN_filler
2814
2815    #######
2816
2817
2818
2819    number_of_times = times.shape[0]
2820    number_of_latitudes = latitudes.shape[0]
2821    number_of_longitudes = longitudes.shape[0]
2822
2823    assert amplitudes.shape[0] == number_of_times
2824    assert amplitudes.shape[1] == number_of_latitudes
2825    assert amplitudes.shape[2] == number_of_longitudes
2826
2827    if verbose:
2828        print '------------------------------------------------'
2829        print 'Statistics:'
2830        print '  Extent (lat/lon):'
2831        print '    lat in [%f, %f], len(lat) == %d'\
2832              %(min(latitudes.flat), max(latitudes.flat),
2833                len(latitudes.flat))
2834        print '    lon in [%f, %f], len(lon) == %d'\
2835              %(min(longitudes.flat), max(longitudes.flat),
2836                len(longitudes.flat))
2837        print '    t in [%f, %f], len(t) == %d'\
2838              %(min(times.flat), max(times.flat), len(times.flat))
2839
2840        q = amplitudes.flat
2841        name = 'Amplitudes (ha) [cm]'
2842        print '    %s in [%f, %f]' %(name, min(q), max(q))
2843
2844        q = uspeed.flat
2845        name = 'Speeds (ua) [cm/s]'
2846        print '    %s in [%f, %f]' %(name, min(q), max(q))
2847
2848        q = vspeed.flat
2849        name = 'Speeds (va) [cm/s]'
2850        print '    %s in [%f, %f]' %(name, min(q), max(q))
2851
2852        q = elevations.flat
2853        name = 'Elevations (e) [m]'
2854        print '    %s in [%f, %f]' %(name, min(q), max(q))
2855
2856
2857    # print number_of_latitudes, number_of_longitudes
2858    number_of_points = number_of_latitudes*number_of_longitudes
2859    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2860
2861
2862    file_h.close()
2863    file_u.close()
2864    file_v.close()
2865    file_e.close()
2866
2867
2868    # NetCDF file definition
2869    outfile = NetCDFFile(swwname, 'w')
2870
2871    description = 'Converted from Ferret files: %s, %s, %s, %s'\
2872                  %(basename_in + '_ha.nc',
2873                    basename_in + '_ua.nc',
2874                    basename_in + '_va.nc',
2875                    basename_in + '_e.nc')
2876   
2877    # Create new file
2878    starttime = times[0]
2879    sww = Write_sww()
2880    sww.store_header(outfile, times, number_of_volumes,
2881                     number_of_points, description=description,
2882                     verbose=verbose)
2883
2884    # Store
2885    from anuga.coordinate_transforms.redfearn import redfearn
2886    x = zeros(number_of_points, Float)  #Easting
2887    y = zeros(number_of_points, Float)  #Northing
2888
2889
2890    if verbose: print 'Making triangular grid'
2891
2892    # Check zone boundaries
2893    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2894
2895    vertices = {}
2896    i = 0
2897    for k, lat in enumerate(latitudes):       #Y direction
2898        for l, lon in enumerate(longitudes):  #X direction
2899
2900            vertices[l,k] = i
2901
2902            zone, easting, northing = redfearn(lat,lon)
2903
2904            msg = 'Zone boundary crossed at longitude = %f' %lon
2905            #assert zone == refzone, msg
2906            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2907            x[i] = easting
2908            y[i] = northing
2909            i += 1
2910
2911    #Construct 2 triangles per 'rectangular' element
2912    volumes = []
2913    for l in range(number_of_longitudes-1):    #X direction
2914        for k in range(number_of_latitudes-1): #Y direction
2915            v1 = vertices[l,k+1]
2916            v2 = vertices[l,k]
2917            v3 = vertices[l+1,k+1]
2918            v4 = vertices[l+1,k]
2919
2920            volumes.append([v1,v2,v3]) #Upper element
2921            volumes.append([v4,v3,v2]) #Lower element
2922
2923    volumes = array(volumes)
2924
2925    if origin is None:
2926        origin = Geo_reference(refzone,min(x),min(y))
2927    geo_ref = write_NetCDF_georeference(origin, outfile)
2928   
2929    if elevation is not None:
2930        z = elevation
2931    else:
2932        if inverted_bathymetry:
2933            z = -1*elevations
2934        else:
2935            z = elevations
2936    #FIXME: z should be obtained from MOST and passed in here
2937
2938    from Numeric import resize
2939    z = resize(z,outfile.variables['z'][:].shape)
2940    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
2941    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
2942    outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
2943    outfile.variables['elevation'][:] = z
2944    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
2945
2946
2947
2948    #Time stepping
2949    stage = outfile.variables['stage']
2950    xmomentum = outfile.variables['xmomentum']
2951    ymomentum = outfile.variables['ymomentum']
2952
2953    if verbose: print 'Converting quantities'
2954    n = len(times)
2955    for j in range(n):
2956        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2957        i = 0
2958        for k in range(number_of_latitudes):      #Y direction
2959            for l in range(number_of_longitudes): #X direction
2960                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2961                stage[j,i] = w
2962                h = w - z[i]
2963                xmomentum[j,i] = uspeed[j,k,l]/100*h
2964                ymomentum[j,i] = vspeed[j,k,l]/100*h
2965                i += 1
2966
2968
2969    #FIXME: Refactor using code from file_function.statistics
2970    #Something like print swwstats(swwname)
2971    if verbose:
2972        x = outfile.variables['x'][:]
2973        y = outfile.variables['y'][:]
2974        print '------------------------------------------------'
2975        print 'Statistics of output file:'
2976        print '  Name: %s' %swwname
2977        print '  Reference:'
2978        print '    Lower left corner: [%f, %f]'\
2979              %(geo_ref.get_xllcorner(), geo_ref.get_yllcorner())
2980        print '    Start time: %f' %starttime
2981        print '    Min time: %f' %mint
2982        print '    Max time: %f' %maxt
2983        print '  Extent:'
2984        print '    x [m] in [%f, %f], len(x) == %d'\
2985              %(min(x.flat), max(x.flat), len(x.flat))
2986        print '    y [m] in [%f, %f], len(y) == %d'\
2987              %(min(y.flat), max(y.flat), len(y.flat))
2988        print '    t [s] in [%f, %f], len(t) == %d'\
2989              %(min(times), max(times), len(times))
2990        print '  Quantities [SI units]:'
2991        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2992            q = outfile.variables[name][:].flat
2993            print '    %s in [%f, %f]' %(name, min(q), max(q))
2994
2995
2996
2997    outfile.close()
2998
2999
3000
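# Standalone sketch of the structured-grid triangulation used by ferret2sww
# above: grid point (l, k) gets index k*nlon + l, and each rectangular cell
# is split into an upper and a lower triangle.
def _example_grid_triangulation(nlat=3, nlon=4):
    vertices = {}
    i = 0
    for k in range(nlat):          # Y direction (latitude)
        for l in range(nlon):      # X direction (longitude)
            vertices[l, k] = i
            i += 1

    volumes = []
    for l in range(nlon-1):        # X direction
        for k in range(nlat-1):    # Y direction
            v1 = vertices[l, k+1]
            v2 = vertices[l, k]
            v3 = vertices[l+1, k+1]
            v4 = vertices[l+1, k]
            volumes.append([v1, v2, v3])  # Upper triangle
            volumes.append([v4, v3, v2])  # Lower triangle

    print '%d grid points -> %d triangles' %(nlat*nlon, len(volumes))
    return volumes
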
3001
3002
3003def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False):
3004    """Template for converting typical text files with time series to
3005    NetCDF tms file.
3006
3007
3008    The file format is assumed to be either two fields separated by a comma:
3009
3010        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3011
3012    E.g.
3013
3014      31/08/04 00:00:00, 1.328223 0 0
3015      31/08/04 00:15:00, 1.292912 0 0
3016
3017    or time (seconds), value0 value1 value2 ...
3018   
3019      0.0, 1.328223 0 0
3020      0.1, 1.292912 0 0
3021
3022    will provide a time dependent function f(t) with three attributes
3023
3024    filename is assumed to be the rootname, with extension .txt for input and .tms for output
3025    """
3026
3027    import time, calendar
3028    from Numeric import array
3029    from anuga.config import time_format
3030    from anuga.utilities.numerical_tools import ensure_numeric
3031
3032
3033
3034    fid = open(filename + '.txt')
3035    line = fid.readline()
3036    fid.close()
3037
3038    fields = line.split(',')
3039    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3040    assert len(fields) == 2, msg
3041
3042    if not time_as_seconds:
3043        try:
3044            starttime = calendar.timegm(time.strptime(fields[0], time_format))
3045        except ValueError:
3046            msg = 'First field in file %s must be' %filename
3047            msg += ' date-time with format %s.\n' %time_format
3048            msg += 'I got %s instead.' %fields[0]
3049            raise DataTimeError, msg
3050    else:
3051        try:
3052            starttime = float(fields[0])
3053        except ValueError:
3054            msg = "Bad time format"
3055            raise DataTimeError, msg
3056
3057
3058    #Split values
3059    values = []
3060    for value in fields[1].split():
3061        values.append(float(value))
3062
3063    q = ensure_numeric(values)
3064
3065    msg = 'ERROR: File must contain at least one independent value'
3066    assert len(q.shape) == 1, msg
3067
3068
3069
3070    #Read times proper
3071    from Numeric import zeros, Float, alltrue
3072    from anuga.config import time_format
3073    import time, calendar
3074
3075    fid = open(filename + '.txt')
3076    lines = fid.readlines()
3077    fid.close()
3078
3079    N = len(lines)
3080    d = len(q)
3081
3082    T = zeros(N, Float)       #Time
3083    Q = zeros((N, d), Float)  #Values
3084
3085    for i, line in enumerate(lines):
3086        fields = line.split(',')
3087        if not time_as_seconds:
3088            realtime = calendar.timegm(time.strptime(fields[0], time_format))
3089        else:
3090            realtime = float(fields[0])
3091        T[i] = realtime - starttime
3092
3093        for j, value in enumerate(fields[1].split()):
3094            Q[i, j] = float(value)
3095
3096    msg = 'File %s must list time as a monotonically ' %filename
3097    msg += 'increasing sequence'
3098    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3099
3100    #Create NetCDF file
3101    from Scientific.IO.NetCDF import NetCDFFile
3102
3103    fid = NetCDFFile(filename + '.tms', 'w')
3104
3105
3106    fid.institution = 'Geoscience Australia'
3107    fid.description = 'Time series'
3108
3109
3110    #Reference point
3111    #Start time in seconds since the epoch (midnight 1/1/1970)
3112    #FIXME: Use Georef
3113    fid.starttime = starttime
3114
3115    # dimension definitions
3116    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3117    #fid.createDimension('number_of_vertices', 3)
3118
3119
3120    fid.createDimension('number_of_timesteps', len(T))
3121
3122    fid.createVariable('time', Float, ('number_of_timesteps',))
3123
3124    fid.variables['time'][:] = T
3125
3126    for i in range(Q.shape[1]):
3127        try:
3128            name = quantity_names[i]
3129        except (TypeError, IndexError):
3130            name = 'Attribute%d'%i
3131
3132        fid.createVariable(name, Float, ('number_of_timesteps',))
3133        fid.variables[name][:] = Q[:,i]
3134
3135    fid.close()
3136
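# Minimal usage sketch (hypothetical file 'gauge'): write a two-line time
# series with time in seconds, then convert it to 'gauge.tms'.
def _example_timefile2netcdf():
    fid = open('gauge.txt', 'w')
    fid.write('0.0, 1.328223 0 0\n')
    fid.write('60.0, 1.292912 0 0\n')
    fid.close()

    timefile2netcdf('gauge',
                    quantity_names=['stage', 'xmomentum', 'ymomentum'],
                    time_as_seconds=True)
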
3137
3138def extent_sww(file_name):
3139    """
3140    Read in an sww file.
3141
3142    Input;
3143    file_name - the sww file
3144
3145    Output;
3146    z - Vector of bed elevation
3147    volumes - Array.  Each row has 3 values, representing
3148    the vertices that define the volume
3149    time - Vector of the times where there is stage information
3150    stage - array with respect to time and vertices (x,y)
3151    """
3152
3153
3154    from Scientific.IO.NetCDF import NetCDFFile
3155
3156    #Check contents
3157    #Get NetCDF
3158    fid = NetCDFFile(file_name, 'r')
3159
3160    # Get the variables
3161    x = fid.variables['x'][:]
3162    y = fid.variables['y'][:]
3163    stage = fid.variables['stage'][:]
3164    #print "stage",stage
3165    #print "stage.shap",stage.shape
3166    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3167    #print "min(stage)",min(stage)
3168
3169    fid.close()
3170
3171    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3172
3173
3174def sww2domain(filename, boundary=None, t=None,
3175               fail_if_NaN=True, NaN_filler=0,
3176               verbose=False, very_verbose=False):
3177    """
3178    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3179
3180    Boundary is not recommended if domain.smooth is not selected, as it
3181    uses unique coordinates, but not unique boundaries. This means that
3182    the boundary file will not be compatible with the coordinates, and will
3183    give a different final boundary, or crash.
3184    """
3185    NaN = 9.969209968386869e+036  # Default NetCDF fill value used to mark NaN
3186    # (overridden below by each quantity's missing_value attribute, if present)
3187
3188    from Scientific.IO.NetCDF import NetCDFFile
3189    from shallow_water import Domain
3190    from Numeric import asarray, transpose, resize
3191
3192    if verbose: print 'Reading from ', filename
3193    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3194    time = fid.variables['time']       #Timesteps
3195    if t is None:
3196        t = time[-1]
3197    time_interp = get_time_interp(time,t)
3198
3199    # Get the variables as Numeric arrays
3200    x = fid.variables['x'][:]             #x-coordinates of vertices
3201    y = fid.variables['y'][:]             #y-coordinates of vertices
3202    elevation = fid.variables['elevation']     #Elevation
3203    stage = fid.variables['stage']     #Water level
3204    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3205    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3206
3207    starttime = fid.starttime[0]
3208    volumes = fid.variables['volumes'][:] #Connectivity
3209    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3210
3211    conserved_quantities = []
3212    interpolated_quantities = {}
3213    other_quantities = []
3214
3215    # get geo_reference
3216    #sww files don't have to have a geo_ref
3217    try:
3218        geo_reference = Geo_reference(NetCDFObject=fid)
3219    except: # Usually AttributeError, if the file has no geo_reference
3220        geo_reference = None
3221
3222    if verbose: print '    getting quantities'
3223    for quantity in fid.variables.keys():
3224        dimensions = fid.variables[quantity].dimensions
3225        if 'number_of_timesteps' in dimensions:
3226            conserved_quantities.append(quantity)
3227            interpolated_quantities[quantity]=\
3228                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3229        else: other_quantities.append(quantity)
3230
3231    other_quantities.remove('x')
3232    other_quantities.remove('y')
3233    other_quantities.remove('z')
3234    other_quantities.remove('volumes')
3235    # Remove the range variables, if present (they are not quantities)
3236    for name in ['stage_range', 'xmomentum_range',
3237                 'ymomentum_range', 'elevation_range']:
3238        if name in other_quantities:
3239            other_quantities.remove(name)
3240
3241
3242       
3243
3244    conserved_quantities.remove('time')
3245
3246    if verbose: print '    building domain'
3247    #    From domain.Domain:
3248    #    domain = Domain(coordinates, volumes,\
3249    #                    conserved_quantities = conserved_quantities,\
3250    #                    other_quantities = other_quantities,zone=zone,\
3251    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3252
3253    #   From shallow_water.Domain:
3254    coordinates=coordinates.tolist()
3255    volumes=volumes.tolist()
3256    #FIXME:should this be in mesh?(peter row)
3257    if fid.smoothing == 'Yes': unique = False
3258    else: unique = True
3259    if unique:
3260        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3261
3262
3263    try:
3264        domain = Domain(coordinates, volumes, boundary)
3265    except AssertionError, e:
3266        fid.close()
3267        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3268        raise DataDomainError, msg
3269
3270    if boundary is not None:
3271        domain.boundary = boundary
3272
3273    domain.geo_reference = geo_reference
3274
3275    domain.starttime=float(starttime)+float(t)
3276    domain.time=0.0
3277
3278    for quantity in other_quantities:
3279        try:
3280            NaN = fid.variables[quantity].missing_value
3281        except:
3282            pass #quantity has no missing_value number
3283        X = fid.variables[quantity][:]
3284        if very_verbose:
3285            print '       ',quantity
3286            print '        NaN =',NaN
3287            print '        max(X)'
3288            print '       ',max(X)
3289            print '        max(X)==NaN'
3290            print '       ',max(X)==NaN
3291            print ''
3292        if (max(X)==NaN) or (min(X)==NaN):
3293            if fail_if_NaN:
3294                msg = 'quantity "%s" contains no_data entry'%quantity
3295                raise DataMissingValuesError, msg
3296            else:
3297                data = (X != NaN)
3298                X = (X*data)+(data==0)*NaN_filler
3299        if unique:
3300            X = resize(X,(len(X)/3,3))
3301        domain.set_quantity(quantity,X)
3302    #
3303    for quantity in conserved_quantities:
3304        try:
3305            NaN = fid.variables[quantity].missing_value
3306        except:
3307            pass #quantity has no missing_value number
3308        X = interpolated_quantities[quantity]
3309        if very_verbose:
3310            print '       ',quantity
3311            print '        NaN =',NaN
3312            print '        max(X)'
3313            print '       ',max(X)
3314            print '        max(X)==NaN'
3315            print '       ',max(X)==NaN
3316            print ''
3317        if (max(X)==NaN) or (min(X)==NaN):
3318            if fail_if_NaN:
3319                msg = 'quantity "%s" contains no_data entry'%quantity
3320                raise DataMissingValuesError, msg
3321            else:
3322                data = (X != NaN)
3323                X = (X*data)+(data==0)*NaN_filler
3324        if unique:
3325            X = resize(X,(X.shape[0]/3,3))
3326        domain.set_quantity(quantity,X)
3327
3328    fid.close()
3329    return domain
3330
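# Example (illustrative sketch; the file name and time are hypothetical):
#
#     # Reconstruct the domain at t=3600 s, replacing no-data entries with 0.0
#     domain = sww2domain('runup.sww', t=3600.0,
#                         fail_if_NaN=False, NaN_filler=0.0, verbose=True)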
3331def interpolated_quantity(saved_quantity, time_interp):
3332    """Given an (index, ratio) pair, interpolate quantity with respect to time."""
3333
3334    index,ratio = time_interp
3335    Q = saved_quantity
3336    if ratio > 0:
3337        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3338    else:
3339        q = Q[index]
3340    #Return vector of interpolated values
3341    return q
3342
3343def get_time_interp(time,t=None):
3344    #Finds the ratio and index for time interpolation.
3345    #It is borrowed from previous abstract_2d_finite_volumes code.
3346    if t is None:
3347        t=time[-1]
3348        index = -1
3349        ratio = 0.
3350    else:
3351        T = time
3352        tau = t
3353        index=0
3354        msg = 'Time interval derived from file %s [%s:%s]'\
3355            %('FIXMEfilename', T[0], T[-1])
3356        msg += ' does not match model time: %s' %tau
3357        if tau < time[0]: raise DataTimeError, msg
3358        if tau > time[-1]: raise DataTimeError, msg
3359        while tau > time[index]: index += 1
3360        while tau < time[index]: index -= 1
3361        if tau == time[index]:
3362            #Protect against case where tau == time[-1] (last time)
3363            # - also works in general when tau == time[i]
3364            ratio = 0
3365        else:
3366            #t is now between index and index+1
3367            ratio = (tau - time[index])/(time[index+1] - time[index])
3368    return (index,ratio)
3369
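# Worked example (illustrative): with time = [0, 10, 20] and t = 15,
# get_time_interp returns (index=1, ratio=0.5) and interpolated_quantity
# then blends the two bracketing time slices:
#
#     from Numeric import array
#     time_interp = get_time_interp(array([0.0, 10.0, 20.0]), t=15.0)
#     Q = array([[0.0, 0.0], [2.0, 4.0], [4.0, 8.0]])   # f(t, point)
#     print interpolated_quantity(Q, time_interp)       # -> [3.0, 6.0]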
3370
3371def weed(coordinates,volumes,boundary = None):
3372    if type(coordinates)==ArrayType:
3373        coordinates = coordinates.tolist()
3374    if type(volumes)==ArrayType:
3375        volumes = volumes.tolist()
3376
3377    unique = False
3378    point_dict = {}
3379    same_point = {}
3380    for i in range(len(coordinates)):
3381        point = tuple(coordinates[i])
3382        if point_dict.has_key(point):
3383            unique = True
3384            same_point[i]=point
3385            #to change all point i references to point j
3386        else:
3387            point_dict[point]=i
3388            same_point[i]=point
3389
3390    coordinates = []
3391    i = 0
3392    for point in point_dict.keys():
3393        point = tuple(point)
3394        coordinates.append(list(point))
3395        point_dict[point]=i
3396        i+=1
3397
3398
3399    for volume in volumes:
3400        for i in range(len(volume)):
3401            index = volume[i]
3402            if index>-1:
3403                volume[i]=point_dict[same_point[index]]
3404
3405    new_boundary = {}
3406    if boundary is not None:
3407        for segment in boundary.keys():
3408            point0 = point_dict[same_point[segment[0]]]
3409            point1 = point_dict[same_point[segment[1]]]
3410            label = boundary[segment]
3411            #FIXME should the boundary attributes be concatenated
3412            #('exterior, pond') or replaced ('pond')(peter row)
3413
3414            if new_boundary.has_key((point0,point1)):
3415                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3416                                              #+','+label
3417
3418            elif new_boundary.has_key((point1,point0)):
3419                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3420                                              #+','+label
3421            else: new_boundary[(point0,point1)]=label
3422
3423        boundary = new_boundary
3424
3425    return coordinates,volumes,boundary
3426
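# Example (illustrative): two triangles sharing a duplicated coordinate.
# weed() merges the duplicate point and renumbers the volumes to match:
#
#     coords = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]  # last is a dup
#     vols = [[0, 1, 2], [2, 3, 0]]
#     coords, vols, _ = weed(coords, vols)
#     print len(coords)   # -> 3 unique points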
3427
3428def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3429                 verbose=False):
3430    """Read Digitial Elevation model from the following NetCDF format (.dem)
3431
3432    Example:
3433
3434    ncols         3121
3435    nrows         1800
3436    xllcorner     722000
3437    yllcorner     5893000
3438    cellsize      25
3439    NODATA_value  -9999
3440    138.3698 137.4194 136.5062 135.5558 ..........
3441
3442    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3443    """
3444
3445    import os
3446    from Scientific.IO.NetCDF import NetCDFFile
3447    from Numeric import Float, zeros, sum, reshape, equal
3448
3449    root = basename_in
3450    inname = root + '.dem'
3451
3452    #Open existing netcdf file to read
3453    infile = NetCDFFile(inname, 'r')
3454    if verbose: print 'Reading DEM from %s' %inname
3455
3456    #Read metadata
3457    ncols = infile.ncols[0]
3458    nrows = infile.nrows[0]
3459    xllcorner = infile.xllcorner[0]
3460    yllcorner = infile.yllcorner[0]
3461    cellsize = infile.cellsize[0]
3462    NODATA_value = infile.NODATA_value[0]
3463    zone = infile.zone[0]
3464    false_easting = infile.false_easting[0]
3465    false_northing = infile.false_northing[0]
3466    projection = infile.projection
3467    datum = infile.datum
3468    units = infile.units
3469
3470    dem_elevation = infile.variables['elevation']
3471
3472    #Get output file name
3473    if basename_out is None:
3474        outname = root + '_' + repr(cellsize_new) + '.dem'
3475    else:
3476        outname = basename_out + '.dem'
3477
3478    if verbose: print 'Write decimated NetCDF file to %s' %outname
3479
3480    #Determine some dimensions for decimated grid
3481    (nrows_stencil, ncols_stencil) = stencil.shape
3482    x_offset = ncols_stencil / 2
3483    y_offset = nrows_stencil / 2
3484    cellsize_ratio = int(cellsize_new / cellsize)
3485    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3486    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3487
3488    #Open netcdf file for output
3489    outfile = NetCDFFile(outname, 'w')
3490
3491    #Create new file
3492    outfile.institution = 'Geoscience Australia'
3493    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3494                           'of spatial point data'
3495    #Georeferencing
3496    outfile.zone = zone
3497    outfile.projection = projection
3498    outfile.datum = datum
3499    outfile.units = units
3500
3501    outfile.cellsize = cellsize_new
3502    outfile.NODATA_value = NODATA_value
3503    outfile.false_easting = false_easting
3504    outfile.false_northing = false_northing
3505
3506    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3507    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3508    outfile.ncols = ncols_new
3509    outfile.nrows = nrows_new
3510
3511    # dimension definition
3512    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3513
3514    # variable definition
3515    outfile.createVariable('elevation', Float, ('number_of_points',))
3516
3517    # Get handle to the variable
3518    elevation = outfile.variables['elevation']
3519
3520    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3521
3522    #Store data
3523    global_index = 0
3524    for i in range(nrows_new):
3525        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3526        lower_index = global_index
3527        telev =  zeros(ncols_new, Float)
3528        local_index = 0
3529        trow = i * cellsize_ratio
3530
3531        for j in range(ncols_new):
3532            tcol = j * cellsize_ratio
3533            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3534
3535            #if dem contains 1 or more NODATA_values set value in
3536            #decimated dem to NODATA_value, else compute decimated
3537            #value using stencil
3538            if sum(sum(equal(tmp, NODATA_value))) > 0:
3539                telev[local_index] = NODATA_value
3540            else:
3541                telev[local_index] = sum(sum(tmp * stencil))
3542
3543            global_index += 1
3544            local_index += 1
3545
3546        upper_index = global_index
3547
3548        elevation[lower_index:upper_index] = telev
3549
3550    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3551
3552    infile.close()
3553    outfile.close()
3554
3555
3556
3557
3558def tsh2sww(filename, verbose=False): #test_tsh2sww
3559    """
3560    Convert a tsh/msh file to sww, as a quick check that the mesh 'looks' good.
3561    """
3562
3563
3564    if verbose: print 'Creating domain from', filename
3565    domain = pmesh_to_domain_instance(filename, Domain)
3566    if verbose: print "Number of triangles = ", len(domain)
3567
3568    domain.smooth = True
3569    domain.format = 'sww'   #Native netcdf visualisation format
3570    file_path, filename = path.split(filename)
3571    filename, ext = path.splitext(filename)
3572    domain.set_name(filename)
3573    domain.reduction = mean
3574    if verbose: print "file_path", file_path
3575    if file_path == "": file_path = "."
3576    domain.set_datadir(file_path)
3577
3578    if verbose:
3579        print "Output written to " + domain.get_datadir() + sep + \
3580              domain.get_name() + "." + domain.format
3581    sww = get_dataobject(domain)
3582    sww.store_connectivity()
3583    sww.store_timestep('stage')
3584
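# Example (illustrative; 'channel.tsh' is a hypothetical mesh file):
#
#     tsh2sww('channel.tsh', verbose=True)
#     # writes channel.sww next to the input mesh for a quick visual check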
3585
3586def asc_csiro2sww(bath_dir,
3587                  elevation_dir,
3588                  ucur_dir,
3589                  vcur_dir,
3590                  sww_file,
3591                  minlat = None, maxlat = None,
3592                  minlon = None, maxlon = None,
3593                  zscale=1,
3594                  mean_stage = 0,
3595                  fail_on_NaN = True,
3596                  elevation_NaN_filler = 0,
3597                  bath_prefix='ba',
3598                  elevation_prefix='el',
3599                  verbose=False):
3600    """
3601    Produce an sww boundary file, from esri ascii data from CSIRO.
3602
3603    Also convert latitude and longitude to UTM. All coordinates are
3604    assumed to be given in the GDA94 datum.
3605
3606    assume:
3607    All files are in esri ascii format
3608
3609    4 types of information
3610    bathymetry
3611    elevation
3612    u velocity
3613    v velocity
3614
3615    Assumptions
3616    The metadata of all the files is the same
3617    Each type is in a seperate directory
3618    One bath file with extention .000
3619    The time period is less than 24hrs and uniform.
3620    """
3621    from Scientific.IO.NetCDF import NetCDFFile
3622
3623    from anuga.coordinate_transforms.redfearn import redfearn
3624
3625    precision = Float # So if we want to change the precision it's done here
3626
3627    # go in to the bath dir and load the only file,
3628    bath_files = os.listdir(bath_dir)
3629
3630    bath_file = bath_files[0]
3631    bath_dir_file =  bath_dir + os.sep + bath_file
3632    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3633
3634    #Use the date.time of the bath file as a basis for
3635    #the start time for other files
3636    base_start = bath_file[-12:]
3637
3638    #go into the elevation dir and load the 000 file
3639    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3640                         + base_start
3641
3642    elevation_files = os.listdir(elevation_dir)
3643    ucur_files = os.listdir(ucur_dir)
3644    vcur_files = os.listdir(vcur_dir)
3645    elevation_files.sort()
3646    # the first elevation file should be the
3647    # file with the same base name as the bath data
3648    assert elevation_files[0] == elevation_prefix + base_start
3649
3650    number_of_latitudes = bath_grid.shape[0]
3651    number_of_longitudes = bath_grid.shape[1]
3652    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3653
3654    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3655                  for x in range(number_of_longitudes)]
3656    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3657                 for y in range(number_of_latitudes)]
3658
3659    # Reverse order of lat, so the first lat represents the first grid row
3660    latitudes.reverse()
3661
3662    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
3663                                                 minlat=minlat, maxlat=maxlat,
3664                                                 minlon=minlon, maxlon=maxlon)
3665
3666
3667    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3668    latitudes = latitudes[kmin:kmax]
3669    longitudes = longitudes[lmin:lmax]
3670    number_of_latitudes = len(latitudes)
3671    number_of_longitudes = len(longitudes)
3672    number_of_times = len(os.listdir(elevation_dir))
3673    number_of_points = number_of_latitudes*number_of_longitudes
3674    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3675
3676    #Work out the times
3677    if len(elevation_files) > 1:
3678        # Assume: The time period is less than 24hrs.
3679        time_period = (int(elevation_files[1][-3:]) - \
3680                      int(elevation_files[0][-3:]))*60*60
3681        times = [x*time_period for x in range(len(elevation_files))]
3682    else:
3683        times = [0.0]
3684
3685
3686    if verbose:
3687        print '------------------------------------------------'
3688        print 'Statistics:'
3689        print '  Extent (lat/lon):'
3690        print '    lat in [%f, %f], len(lat) == %d'\
3691              %(min(latitudes), max(latitudes),
3692                len(latitudes))
3693        print '    lon in [%f, %f], len(lon) == %d'\
3694              %(min(longitudes), max(longitudes),
3695                len(longitudes))
3696        print '    t in [%f, %f], len(t) == %d'\
3697              %(min(times), max(times), len(times))
3698
3699    ######### WRITE THE SWW FILE #############
3700    # NetCDF file definition
3701    outfile = NetCDFFile(sww_file, 'w')
3702
3703    #Create new file
3704    outfile.institution = 'Geoscience Australia'
3705    outfile.description = 'Converted from XXX'
3706
3707
3708    #For sww compatibility
3709    outfile.smoothing = 'Yes'
3710    outfile.order = 1
3711
3712    #Start time in seconds since the epoch (midnight 1/1/1970)
3713    outfile.starttime = starttime = times[0]
3714
3715
3716    # dimension definitions
3717    outfile.createDimension('number_of_volumes', number_of_volumes)
3718
3719    outfile.createDimension('number_of_vertices', 3)
3720    outfile.createDimension('number_of_points', number_of_points)
3721    outfile.createDimension('number_of_timesteps', number_of_times)
3722
3723    # variable definitions
3724    outfile.createVariable('x', precision, ('number_of_points',))
3725    outfile.createVariable('y', precision, ('number_of_points',))
3726    outfile.createVariable('elevation', precision, ('number_of_points',))
3727
3728    #FIXME: Backwards compatibility
3729    outfile.createVariable('z', precision, ('number_of_points',))
3730    #################################
3731
3732    outfile.createVariable('volumes', Int, ('number_of_volumes',
3733                                            'number_of_vertices'))
3734
3735    outfile.createVariable('time', precision,
3736                           ('number_of_timesteps',))
3737
3738    outfile.createVariable('stage', precision,
3739                           ('number_of_timesteps',
3740                            'number_of_points'))
3741
3742    outfile.createVariable('xmomentum', precision,
3743                           ('number_of_timesteps',
3744                            'number_of_points'))
3745
3746    outfile.createVariable('ymomentum', precision,
3747                           ('number_of_timesteps',
3748                            'number_of_points'))
3749
3750    #Store
3751    from anuga.coordinate_transforms.redfearn import redfearn
3752    x = zeros(number_of_points, Float)  #Easting
3753    y = zeros(number_of_points, Float)  #Northing
3754
3755    if verbose: print 'Making triangular grid'
3756    #Get zone of 1st point.
3757    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3758
3759    vertices = {}
3760    i = 0
3761    for k, lat in enumerate(latitudes):
3762        for l, lon in enumerate(longitudes):
3763
3764            vertices[l,k] = i
3765
3766            zone, easting, northing = redfearn(lat,lon)
3767
3768            msg = 'Zone boundary crossed at longitude = %s' %lon
3769            #assert zone == refzone, msg
3770            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3771            x[i] = easting
3772            y[i] = northing
3773            i += 1
3774
3775
3776    #Construct 2 triangles per 'rectangular' element
3777    volumes = []
3778    for l in range(number_of_longitudes-1):    #X direction
3779        for k in range(number_of_latitudes-1): #Y direction
3780            v1 = vertices[l,k+1]
3781            v2 = vertices[l,k]
3782            v3 = vertices[l+1,k+1]
3783            v4 = vertices[l+1,k]
3784
3785            #Note, this is different to the ferret2sww code
3786            #since the order of the lats is reversed.
3787            volumes.append([v1,v3,v2]) #Upper element
3788            volumes.append([v4,v2,v3]) #Lower element
3789
3790    volumes = array(volumes)
3791
3792    geo_ref = Geo_reference(refzone,min(x),min(y))
3793    geo_ref.write_NetCDF(outfile)
3794
3795    # This will put the geo ref in the middle
3796    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3797
3798
3799    if verbose:
3800        print '------------------------------------------------'
3801        print 'More Statistics:'
3802        print '  Extent (x/y):'
3803        print '    x in [%f, %f], len(x) == %d'\
3804              %(min(x), max(x),
3805                len(x))
3806        print '    y in [%f, %f], len(y) == %d'\
3807              %(min(y), max(y),
3808                len(y))
3809        print 'geo_ref: ',geo_ref
3810
3811    z = resize(bath_grid,outfile.variables['z'][:].shape)
3812    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3813    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3814    outfile.variables['z'][:] = z
3815    outfile.variables['elevation'][:] = z  #FIXME HACK
3816    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3817
3818    stage = outfile.variables['stage']
3819    xmomentum = outfile.variables['xmomentum']
3820    ymomentum = outfile.variables['ymomentum']
3821
3822    outfile.variables['time'][:] = times   #Store time relative
3823
3824    if verbose: print 'Converting quantities'
3825    n = number_of_times
3826    for j in range(number_of_times):
3827        # load in files
3828        elevation_meta, elevation_grid = \
3829           _read_asc(elevation_dir + os.sep + elevation_files[j])
3830
3831        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3832        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3833
3834        #cut matrix to desired size
3835        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3836        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3837        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3838       
3839        # handle missing values
3840        missing = (elevation_grid == elevation_meta['NODATA_value'])
3841        if sometrue(missing):
3842            if fail_on_NaN:
3843                msg = 'File %s contains missing values'\
3844                      %(elevation_files[j])
3845                raise DataMissingValuesError, msg
3846            else:
3847                elevation_grid = elevation_grid*(missing==0) + \
3848                                 missing*elevation_NaN_filler
3849
3850
3851        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3852        i = 0
3853        for k in range(number_of_latitudes):      #Y direction
3854            for l in range(number_of_longitudes): #X direction
3855                w = zscale*elevation_grid[k,l] + mean_stage
3856                stage[j,i] = w
3857                h = w - z[i]
3858                xmomentum[j,i] = u_momentum_grid[k,l]*h
3859                ymomentum[j,i] = v_momentum_grid[k,l]*h
3860                i += 1
3861    outfile.close()
3862
3863def _get_min_max_indexes(latitudes_ref,longitudes_ref,
3864                        minlat=None, maxlat=None,
3865                        minlon=None, maxlon=None):
3866    """
3867    return max, min indexes (for slicing) of the lat and long arrays to cover the area
3868    specified with min/max lat/long
3869
3870    Think of the latitudes and longitudes describing a 2d surface.
3871    The area returned is, if possible, just big enough to cover the
3872    inputed max/min area. (This will not be possible if the max/min area
3873    has a section outside of the latitudes/longitudes area.)
3874
3875    asset  longitudes are sorted,
3876    long - from low to high (west to east, eg 148 - 151)
3877    assert latitudes are sorted, ascending or decending
3878    """
3879    latitudes = latitudes_ref[:]
3880    longitudes = longitudes_ref[:]
3881
3882    latitudes = ensure_numeric(latitudes)
3883    longitudes = ensure_numeric(longitudes)
3884   
3885    assert allclose(sort(longitudes), longitudes)
3886   
3887    lat_ascending = True
3888    if not allclose(sort(latitudes), latitudes):
3889        lat_ascending = False
3890        # reverse order of lat, so it's in ascending order         
3891        latitudes = latitudes[::-1]
3892        assert allclose(sort(latitudes), latitudes)
3893    #print "latitudes  in funct", latitudes
3894   
3895    largest_lat_index = len(latitudes)-1
3896    #Cut out a smaller extent.
3897    if minlat is None:
3898        lat_min_index = 0
3899    else:
3900        lat_min_index = searchsorted(latitudes, minlat)-1
3901        if lat_min_index < 0:
3902            lat_min_index = 0
3903
3904
3905    if maxlat is None:
3906        lat_max_index = largest_lat_index
3907    else:
3908        lat_max_index = searchsorted(latitudes, maxlat)
3909        if lat_max_index > largest_lat_index:
3910            lat_max_index = largest_lat_index
3911
3912    if minlon is None:
3913        lon_min_index = 0
3914    else:
3915        lon_min_index = searchsorted(longitudes, minlon)-1
3916        if lon_min_index < 0:
3917            lon_min_index = 0
3918
3919    if maxlon is None:
3920        lon_max_index = len(longitudes)
3921    else:
3922        lon_max_index = searchsorted(longitudes, maxlon)
3923
3924    # Reversing the indexes, if the lat array is descending
3925    if lat_ascending is False:
3926        lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3927                                       largest_lat_index - lat_min_index
3928    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3929    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3930
3931    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3932
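# Worked example (illustrative): for ascending inputs
#
#     kmin, kmax, lmin, lmax = _get_min_max_indexes(
#         [-36.0, -35.0, -34.0, -33.0], [148.0, 149.0, 150.0],
#         minlat=-34.5, maxlat=-33.5, minlon=148.5, maxlon=149.5)
#
# this returns (1, 4, 0, 3): latitudes[1:4] and longitudes[0:3] are just
# big enough to cover the requested area.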
3933
3934def _read_asc(filename, verbose=False):
3935    """Read esri file from the following ASCII format (.asc)
3936
3937    Example:
3938
3939    ncols         3121
3940    nrows         1800
3941    xllcorner     722000
3942    yllcorner     5893000
3943    cellsize      25
3944    NODATA_value  -9999
3945    138.3698 137.4194 136.5062 135.5558 ..........
3946
3947    """
3948
3949    datafile = open(filename)
3950
3951    if verbose: print 'Reading DEM from %s' %(filename)
3952    lines = datafile.readlines()
3953    datafile.close()
3954
3955    if verbose: print 'Got', len(lines), ' lines'
3956
3957    ncols = int(lines.pop(0).split()[1].strip())
3958    nrows = int(lines.pop(0).split()[1].strip())
3959    xllcorner = float(lines.pop(0).split()[1].strip())
3960    yllcorner = float(lines.pop(0).split()[1].strip())
3961    cellsize = float(lines.pop(0).split()[1].strip())
3962    NODATA_value = float(lines.pop(0).split()[1].strip())
3963
3964    assert len(lines) == nrows
3965
3966    #Store data
3967    grid = []
3968
3969
3970    for line in lines:
3971        cells = line.split()
3972        assert len(cells) == ncols
3973        grid.append(array([float(x) for x in cells]))
3974    grid = array(grid)
3975
3976    return {'xllcorner':xllcorner,
3977            'yllcorner':yllcorner,
3978            'cellsize':cellsize,
3979            'NODATA_value':NODATA_value}, grid
3980
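# Example (illustrative; 'bathy.asc' is a hypothetical esri ascii file):
#
#     metadata, grid = _read_asc('bathy.asc')
#     print metadata['cellsize'], grid.shape   # grid is an (nrows, ncols) array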
3981
3982
3983    ####  URS 2 SWW  ###
3984
3985lon_name = 'LON'
3986lat_name = 'LAT'
3987time_name = 'TIME'
3988precision = Float # So if we want to change the precision it's done here
3989class Write_nc:
3990    """
3991    Write an nc file.
3992   
3993    Note, this should be checked to meet cdc netcdf conventions for gridded
3994    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
3995   
3996    """
3997    def __init__(self,
3998                 quantity_name,
3999                 file_name,
4000                 time_step_count,
4001                 time_step,
4002                 lon,
4003                 lat):
4004        """
4005        time_step_count is the number of time steps.
4006        time_step is the time step size
4007       
4008        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
4009        """
4010        self.quantity_name = quantity_name
4011        quantity_units = {'HA':'CENTIMETERS',
4012                              'UA':'CENTIMETERS/SECOND',
4013                              'VA':'CENTIMETERS/SECOND'}       
4014       
4015        multiplier_dic = {'HA': 100.0,  # To convert from m to cm
4016                          'UA': 100.0,  # m/s to cm/sec
4017                          'VA': -100.0} # MUX files have positive x in the
4018        # southern direction.  This corrects for it, when writing nc files.
4019       
4020        self.quantity_multiplier =  multiplier_dic[self.quantity_name]
4021       
4022        #self.file_name = file_name
4023        self.time_step_count = time_step_count
4024        self.time_step = time_step
4025
4026        # NetCDF file definition
4027        self.outfile = NetCDFFile(file_name, 'w')
4028        outfile = self.outfile       
4029
4030        #Create new file
4031        nc_lon_lat_header(outfile, lon, lat)
4032   
4033        # TIME
4034        outfile.createDimension(time_name, None)
4035        outfile.createVariable(time_name, precision, (time_name,))
4036
4037        #QUANTITY
4038        outfile.createVariable(self.quantity_name, precision,
4039                               (time_name, lat_name, lon_name))
4040        outfile.variables[self.quantity_name].missing_value=-1.e+034
4041        outfile.variables[self.quantity_name].units= \
4042                                 quantity_units[self.quantity_name]
4043        outfile.variables[lon_name][:]= ensure_numeric(lon)
4044        outfile.variables[lat_name][:]= ensure_numeric(lat)
4045
4046        #Assume no one will be wanting to read this, while we are writing
4047        #outfile.close()
4048       
4049    def store_timestep(self, quantity_slice):
4050        """
4051        Write a time slice of quantity info
4052        quantity_slice is the data to be stored at this time step
4053        """
4054       
4055        outfile = self.outfile
4056       
4057        # Get the variables
4058        time = outfile.variables[time_name]
4059        quantity = outfile.variables[self.quantity_name]
4060           
4061        i = len(time)
4062
4063        #Store time
4064        time[i] = i*self.time_step #self.domain.time
4065        quantity[i,:] = quantity_slice* self.quantity_multiplier
4066       
4067    def close(self):
4068        self.outfile.close()
4069
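# Example (illustrative sketch; the file name and values are hypothetical):
#
#     from Numeric import zeros, Float
#     nc = Write_nc('HA', 'demo_ha.nc', time_step_count=2, time_step=60.0,
#                   lon=[148.0, 149.0], lat=[-35.0, -34.0])
#     for i in range(2):
#         nc.store_timestep(zeros((2, 2), Float))  # wave height in metres
#     nc.close()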
4070def urs2sww(basename_in='o', basename_out=None, verbose=False,
4071            remove_nc_files=True,
4072            minlat=None, maxlat=None,
4073            minlon= None, maxlon=None,
4074            mint=None, maxt=None,
4075            mean_stage=0,
4076            origin = None,
4077            zscale=1,
4078            fail_on_NaN=True,
4079            NaN_filler=0,
4080            elevation=None):
4081    """
4082    Convert URS C binary format for wave propagation to
4083    sww format native to abstract_2d_finite_volumes.
4084
4085    Specify only basename_in and read files of the form
4086    basefilename_waveheight-z-mux, basefilename_velocity-e-mux and
4087    basefilename_velocity-n-mux containing relative height,
4088    x-velocity and y-velocity, respectively.
4089
4090    Also convert latitude and longitude to UTM. All coordinates are
4091    assumed to be given in the GDA94 datum. The latitude and longitude
4092    information is for  a grid.
4093
4094    min's and max's: If omitted - full extent is used.
4095    To include a value min may equal it, while max must exceed it.
4096    Lat and lon are assumed to be in decimal degrees.
4097    NOTE: minlon is the most easterly boundary.
4098   
4099    origin is a 3-tuple with geo referenced
4100    UTM coordinates (zone, easting, northing)
4101    It will be the origin of the sww file. This shouldn't be used,
4102    since all of anuga should be able to handle an arbitrary origin.
4103
4104
4105    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
4106    which means that latitude is the fastest
4107    varying dimension (row major order, so to speak)
4108
4109    In URS C binary the latitudes and longitudes are in ascending order.
4110    """
4111    if basename_out is None:
4112        basename_out = basename_in
4113    files_out = urs2nc(basename_in, basename_out)
4114    ferret2sww(basename_out,
4115               minlat=minlat,
4116               maxlat=maxlat,
4117               minlon=minlon,
4118               maxlon=maxlon,
4119               mint=mint,
4120               maxt=maxt,
4121               mean_stage=mean_stage,
4122               origin=origin,
4123               zscale=zscale,
4124               fail_on_NaN=fail_on_NaN,
4125               NaN_filler=NaN_filler,
4126               inverted_bathymetry=True,
4127               verbose=verbose)
4128    #print "files_out",files_out
4129    if remove_nc_files:
4130        for file_out in files_out:
4131            os.remove(file_out)
4132   
4133def urs2nc(basename_in = 'o', basename_out = 'urs'):
4134    """
4135    Convert the 3 urs files to 4 nc files.
4136
4137    The urs file names must be:
4138    [basename_in]_waveheight-z-mux
4139    [basename_in]_velocity-e-mux
4140    [basename_in]_velocity-n-mux
4141   
4142    """
4143   
4144    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4145                basename_in + EAST_VELOCITY_LABEL,
4146                basename_in + NORTH_VELOCITY_LABEL]
4147    files_out = [basename_out+'_ha.nc',
4148                 basename_out+'_ua.nc',
4149                 basename_out+'_va.nc']
4150    quantities = ['HA','UA','VA']
4151
4152    for file_name in files_in:
4153        if not os.access(file_name, os.F_OK):
4154            msg = 'File %s does not exist or is not accessible' %file_name
4155            raise IOError, msg
4156       
4157    hashed_elevation = None
4158    for file_in, file_out, quantity in map(None, files_in,
4159                                           files_out,
4160                                           quantities):
4161        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
4162                                         file_out,
4163                                         quantity)
4164        #print "lonlatdep", lonlatdep
4165        if hashed_elevation is None:
4166            elevation_file = basename_out+'_e.nc'
4167            write_elevation_nc(elevation_file,
4168                                lon,
4169                                lat,
4170                                depth)
4171            hashed_elevation = myhash(lonlatdep)
4172        else:
4173            msg = "The elevation information in the mux files is inconsistent"
4174            assert hashed_elevation == myhash(lonlatdep), msg
4175    files_out.append(elevation_file)
4176    return files_out
4177   
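# Example (illustrative; basenames are hypothetical and the mux files must
# already exist):
#
#     files = urs2nc(basename_in='o', basename_out='urs')
#     # -> ['urs_ha.nc', 'urs_ua.nc', 'urs_va.nc', 'urs_e.nc']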
4178def _binary_c2nc(file_in, file_out, quantity):
4179    """
4180    Reads in a quantity urs file and writes a quantity nc file.
4181    additionally, returns the depth and lat, long info,
4182    so it can be written to a file.
4183    """
4184    columns = 3 # long, lat , depth
4185    mux_file = open(file_in, 'rb')
4186   
4187    # Number of points/stations
4188    (points_num,)= unpack('i',mux_file.read(4))
4189
4190    # nt, int - Number of time steps
4191    (time_step_count,)= unpack('i',mux_file.read(4))
4192
4193    #dt, float - time step, seconds
4194    (time_step,) = unpack('f', mux_file.read(4))
4195   
4196    msg = "Bad data in the mux file."
4197    if points_num < 0:
4198        mux_file.close()
4199        raise ANUGAError, msg
4200    if time_step_count < 0:
4201        mux_file.close()
4202        raise ANUGAError, msg
4203    if time_step < 0:
4204        mux_file.close()
4205        raise ANUGAError, msg
4206   
4207    lonlatdep = p_array.array('f')
4208    lonlatdep.read(mux_file, columns * points_num)
4209    lonlatdep = array(lonlatdep, typecode=Float)   
4210    lonlatdep = reshape(lonlatdep, (points_num, columns))
4211   
4212    lon, lat, depth = lon_lat2grid(lonlatdep)
4213    lon_sorted = list(lon)
4214    lon_sorted.sort()
4215
4216    if not lon == lon_sorted:
4217        msg = "Longitudes in mux file are not in ascending order"
4218        raise IOError, msg
4219    lat_sorted = list(lat)
4220    lat_sorted.sort()
4221
4222    if not lat == lat_sorted:
4223        msg = "Latitudes in mux file are not in ascending order"
4224        raise IOError, msg
4225    nc_file = Write_nc(quantity,
4226                       file_out,
4227                       time_step_count,
4228                       time_step,
4229                       lon,
4230                       lat)
4231
4232    for i in range(time_step_count):
4233        #Read in a time slice  from mux file 
4234        hz_p_array = p_array.array('f')
4235        hz_p_array.read(mux_file, points_num)
4236        hz_p = array(hz_p_array, typecode=Float)
4237        hz_p = reshape(hz_p, (len(lon), len(lat)))
4238        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
4239
4240        #write time slice to nc file
4241        nc_file.store_timestep(hz_p)
4242    mux_file.close()
4243    nc_file.close()
4244
4245    return lonlatdep, lon, lat, depth
4246   
4247
4248def write_elevation_nc(file_out, lon, lat, depth_vector):
4249    """
4250    Write an nc elevation file.
4251    """
4252   
4253    # NetCDF file definition
4254    outfile = NetCDFFile(file_out, 'w')
4255
4256    #Create new file
4257    nc_lon_lat_header(outfile, lon, lat)
4258   
4259    # ELEVATION
4260    zname = 'ELEVATION'
4261    outfile.createVariable(zname, precision, (lat_name, lon_name))
4262    outfile.variables[zname].units='CENTIMETERS'
4263    outfile.variables[zname].missing_value=-1.e+034
4264
4265    outfile.variables[lon_name][:]= ensure_numeric(lon)
4266    outfile.variables[lat_name][:]= ensure_numeric(lat)
4267
4268    depth = reshape(depth_vector, ( len(lat), len(lon)))
4269    outfile.variables[zname][:]= depth
4270   
4271    outfile.close()
4272   
4273def nc_lon_lat_header(outfile, lon, lat):
4274    """
4275    outfile is the netcdf file handle.
4276    lon - a list/array of the longitudes
4277    lat - a list/array of the latitudes
4278    """
4279   
4280    outfile.institution = 'Geoscience Australia'
4281    outfile.description = 'Converted from URS binary C'
4282   
4283    # Longitude
4284    outfile.createDimension(lon_name, len(lon))
4285    outfile.createVariable(lon_name, precision, (lon_name,))
4286    outfile.variables[lon_name].point_spacing='uneven'
4287    outfile.variables[lon_name].units='degrees_east'
4288    outfile.variables[lon_name].assignValue(lon)
4289
4290
4291    # Latitude
4292    outfile.createDimension(lat_name, len(lat))
4293    outfile.createVariable(lat_name, precision, (lat_name,))
4294    outfile.variables[lat_name].point_spacing='uneven'
4295    outfile.variables[lat_name].units='degrees_north'
4296    outfile.variables[lat_name].assignValue(lat)
4297
4298
4299   
4300def lon_lat2grid(long_lat_dep):
4301    """
4302    given a list of points that are assumed to be on a grid,
4303    return the long's and lat's of the grid.
4304    long_lat_dep is an array where each row is a position.
4305    The first column is longitudes.
4306    The second column is latitudes.
4307
4308    The latitude is the fastest varying dimension - in mux files
4309    """
4310    LONG = 0
4311    LAT = 1
4312    QUANTITY = 2
4313
4314    long_lat_dep = ensure_numeric(long_lat_dep, Float)
4315   
4316    num_points = long_lat_dep.shape[0]
4317    this_rows_long = long_lat_dep[0,LONG]
4318
4319    # Count the length of unique latitudes
4320    i = 0
4321    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
4322        i += 1
4323    # determine the lats and longs from the grid
4324    lat = long_lat_dep[:i, LAT]       
4325    long = long_lat_dep[::i, LONG]
4326   
4327    lenlong = len(long)
4328    lenlat = len(lat)
4329    #print 'len lat', lat, len(lat)
4330    #print 'len long', long, len(long)
4331         
4332    msg = 'Input data is not gridded'     
4333    assert num_points % lenlat == 0, msg
4334    assert num_points % lenlong == 0, msg
4335         
4336    # Test that data is gridded       
4337    for i in range(lenlong):
4338        msg = 'Data is not gridded.  It must be for this operation'
4339        first = i*lenlat
4340        last = first + lenlat
4341               
4342        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4343        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
4344   
4345   
4346#    print 'range long', min(long), max(long)
4347#    print 'range lat', min(lat), max(lat)
4348#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4349#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4350   
4351   
4352   
4353    msg = 'Out of range latitudes/longitudes'
4354    for l in lat: assert -90 < l < 90, msg
4355    for l in long: assert -180 < l < 180, msg
4356
4357    # Changing quantity from lat being the fastest varying dimension to
4358    # long being the fastest varying dimension
4359    # FIXME - make this faster/do this a better way
4360    # use numeric transpose, after reshaping the quantity vector
4361#    quantity = zeros(len(long_lat_dep), Float)
4362    quantity = zeros(num_points, Float)
4363   
4364#    print 'num',num_points
4365    for lat_i, _ in enumerate(lat):
4366        for long_i, _ in enumerate(long):
4367            q_index = lat_i*lenlong+long_i
4368            lld_index = long_i*lenlat+lat_i
4369#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4370            temp = long_lat_dep[lld_index, QUANTITY]
4371            quantity[q_index] = temp
4372           
4373    return long, lat, quantity
4374
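# Worked example (illustrative): four points on a 2x2 grid, with latitude
# varying fastest (as in mux files):
#
#     from Numeric import array, Float
#     lld = array([[148.0, -35.0, 1.0],
#                  [148.0, -34.0, 2.0],
#                  [149.0, -35.0, 3.0],
#                  [149.0, -34.0, 4.0]], Float)
#     long, lat, quantity = lon_lat2grid(lld)
#     # -> long = [148.0, 149.0], lat = [-35.0, -34.0],
#     #    quantity = [1.0, 3.0, 2.0, 4.0]  (longitude now varies fastest)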
4375    ####  END URS 2 SWW  ###
4376
4377    #### URS UNGRIDDED 2 SWW ###
4378
4379    ### PRODUCING THE POINTS NEEDED FILE ###
4380LL_LAT = -50.0
4381LL_LONG = 80.0
4382GRID_SPACING = 1.0/60.0
4383LAT_AMOUNT = 4800
4384LONG_AMOUNT = 3600
4385def URS_points_needed_to_file(file_name, boundary_polygon, zone,
4386                              ll_lat=LL_LAT, ll_long=LL_LONG,
4387                              grid_spacing=GRID_SPACING, 
4388                              lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT,
4389                              export_csv=False, use_cache=False,
4390                              verbose=False):
4391    """
4392    file_name - name of the urs file produced for David.
4393    boundary_polygon - a list of points that describes a polygon.
4394                      The last point is assumed to join the first point.
4395                      This is in UTM (lat long would be better though)
4396
4397    ll_lat - lower left latitude, in decimal degrees
4398    ll_long - lower left longitude, in decimal degrees
4399    grid_spacing - in decimal degrees
4400
4401
4402    Don't add the file extension.  It will be added.
4403    """
4404    geo = URS_points_needed(boundary_polygon, zone, ll_lat, ll_long,
4405                            grid_spacing, 
4406                      lat_amount, long_amount,use_cache, verbose)
4407    if not file_name[-4:] == ".urs":
4408        file_name += ".urs"
4409    geo.export_points_file(file_name)
4410    if export_csv:
4411        if file_name[-4:] == ".urs":
4412            file_name = file_name[:-4] + ".csv"
4413        geo.export_points_file(file_name)
4414
4415def URS_points_needed(boundary_polygon, zone, ll_lat=LL_LAT,
4416                      ll_long=LL_LONG, grid_spacing=GRID_SPACING, 
4417                      lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT,
4418                      use_cache=False, verbose=False):
4419    args = (boundary_polygon,
4420                      zone)
4421    kwargs = {'ll_lat': ll_lat,
4422              'll_long': ll_long,
4423              'grid_spacing': grid_spacing,
4424              'lat_amount': lat_amount,
4425              'long_amount': long_amount} 
4426    if use_cache is True:
4427        try:
4428            from anuga.caching import cache
4429        except:
4430            msg = 'Caching was requested, but caching module '+\
4431                  'could not be imported'
4432            raise ImportError, msg
4433
4434
4435        geo = cache(_URS_points_needed,
4436                  args, kwargs,
4437                  verbose=verbose,
4438                  compression=False)
4439    else:
4440        #I was getting 'got multiple values for keyword argument' errors
4441        #geo = apply(_URS_points_needed, args, kwargs)
4442        geo = _URS_points_needed(boundary_polygon,
4443                      zone, ll_lat,
4444                      ll_long, grid_spacing, 
4445                      lat_amount, long_amount)
4446
4447    return geo

4448def _URS_points_needed(boundary_polygon,
4449                      zone, ll_lat=LL_LAT,
4450                      ll_long=LL_LONG, grid_spacing=GRID_SPACING, 
4451                      lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT):
4452    """
4453
4454    boundary_polygon - a list of points that describes a polygon.
4455                      The last point is assumed to join the first point.
4456                      This is in UTM (lat long would be better though)
4457
4458    ll_lat - lower left latitude, in decimal degrees
4459    ll_long - lower left longitude, in decimal degrees
4460    grid_spacing - in decimal degrees
4461
4462    """
4463   
4464    from sets import ImmutableSet
4465   
4466    msg = "grid_spacing can not be zero"
4467    assert grid_spacing != 0, msg
4468    a = boundary_polygon
4469    # List of segments (two points each); segment 0 joins last point to first.
4470    segs = [i and [a[i-1], a[i]] or [a[len(a)-1], a[0]] for i in range(len(a))]
4471
4472    # convert the segs to Lat's and longs.
4473   
4474    # Don't assume the zone of the segments is the same as the lower left
4475    # corner of the lat long data!!  They can easily be in different zones
4476   
4477    lat_long_set = ImmutableSet()
4478    for seg in segs:
4479        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing, 
4480                      lat_amount, long_amount, zone)
4481        lat_long_set |= ImmutableSet(points_lat_long)
4482    #print "lat_long_set",lat_long_set
4483    geo = Geospatial_data(data_points=list(lat_long_set),
4484                              points_are_lats_longs=True)
4485    return geo
4486   
4487def points_needed(seg, ll_lat, ll_long, grid_spacing, 
4488                  lat_amount, long_amount, zone):
4489    """
4490    return a list of the points, in lats and longs that are needed to
4491    interpolate any point on the segment.
4492    """
4493    from math import sqrt
4494    #print "zone",zone
4495    geo_reference = Geo_reference(zone=zone)
4496    #print "seg",seg
4497    geo = Geospatial_data(seg,geo_reference=geo_reference)
4498    seg_lat_long = geo.get_data_points(as_lat_long=True)
4499    #print "seg_lat_long", seg_lat_long
4500    # 1.415 = 2^0.5, rounded up....
4501    sqrt_2_rounded_up = 1.415
4502    buffer = sqrt_2_rounded_up * grid_spacing
4503   
4504    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
4505    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
4506    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
4507    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer
4508
4509    first_row = (min_long - ll_long)/grid_spacing
4510    # To round up
4511    first_row_long = int(round(first_row + 0.5))
4512    #print "first_row", first_row_long
4513
4514    last_row = (max_long - ll_long)/grid_spacing # round down
4515    last_row_long = int(round(last_row))
4516    #print "last_row",last_row _long
4517   
4518    first_row = (min_lat - ll_lat)/grid_spacing
4519    # To round up
4520    first_row_lat = int(round(first_row + 0.5))
4521    #print "first_row", first_row_lat
4522
4523    last_row = (max_lat - ll_lat)/grid_spacing # round down
4524    last_row_lat = int(round(last_row))
4525    #print "last_row",last_row_lat
4526
4527    # To work out the max distance:
4528    # 111120 m - approximate ground distance spanned by 1 degree of latitude.
4529    # max_distance = sqrt(2) * 111120 * grid_spacing
4530    max_distance = 157147.4112 * grid_spacing
4531    #print "max_distance", max_distance #2619.12 m for 1 minute
4532    points_lat_long = []
4533    # Create a list of the lat long points to include.
4534    for index_lat in range(first_row_lat, last_row_lat + 1):
4535        for index_long in range(first_row_long, last_row_long + 1):
4536            lat = ll_lat + index_lat*grid_spacing
4537            long = ll_long + index_long*grid_spacing
4538
4539            #filter here to keep good points
4540            if keep_point(lat, long, seg, max_distance):
4541                points_lat_long.append((lat, long)) #must be hashable
4542    #print "points_lat_long", points_lat_long
4543
4544    # Points too far from the segment were already filtered out by keep_point
4545    return points_lat_long
4546
4547def keep_point(lat, long, seg, max_distance):
4548    """
4549    seg is two points, UTM
4550    """
4551    from math import sqrt
4552    _ , x0, y0 = redfearn(lat, long)
4553    x1 = seg[0][0]
4554    y1 = seg[0][1]
4555    x2 = seg[1][0]
4556    y2 = seg[1][1]
4557
4558    x2_1 = x2-x1
4559    y2_1 = y2-y1
4560    d = abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1))/sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1))
4561    return d <= max_distance
4562
4563
4564
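# Note (illustrative): d above is the perpendicular distance from (x0, y0)
# to the infinite line through the segment, from the standard formula
#
#     d = |(x2-x1)(y1-y0) - (x1-x0)(y2-y1)| / sqrt((x2-x1)^2 + (y2-y1)^2)
#
# so a point is kept if its offset from the segment's line is at most
# max_distance.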
4565   
4566    #### CONVERTING UNGRIDDED URS DATA TO AN SWW FILE ####
4567   
4568WAVEHEIGHT_MUX_LABEL = '_waveheight-z-mux'
4569EAST_VELOCITY_LABEL =  '_velocity-e-mux'
4570NORTH_VELOCITY_LABEL =  '_velocity-n-mux' 
4571def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
4572                      mint=None, maxt=None,
4573                      mean_stage=0,
4574                      origin=None,
4575                      hole_points_UTM=None,
4576                      zscale=1):
4577    """   
4578    Convert URS C binary format for wave propagation to
4579    sww format native to abstract_2d_finite_volumes.
4580
4581
4582    Specify only basename_in and read files of the form
4583    basefilename_waveheight-z-mux, basefilename_velocity-e-mux and
4584    basefilename_velocity-n-mux containing relative height,
4585    x-velocity and y-velocity, respectively.
4586
4587    Also convert latitude and longitude to UTM. All coordinates are
4588    assumed to be given in the GDA94 datum. The latitude and longitude
4589    information is assumed to be ungridded.
4590
4591    min's and max's: If omitted - full extent is used.
4592    To include a value min and max may equal it.
4593    Lat and lon are assumed to be in decimal degrees.
4594   
4595    origin is a 3-tuple with geo referenced
4596    UTM coordinates (zone, easting, northing)
4597    It will be the origin of the sww file. This shouldn't be used,
4598    since all of anuga should be able to handle an arbitrary origin.
4599    The mux point info is NOT relative to this origin.
4600
4601
4602    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
4603    which means that latitude is the fastest
4604    varying dimension (row major order, so to speak)
4605
4606    In URS C binary the latitudes and longitudes are in ascending order.
4607
4608    Note, interpolations of the resulting sww file will be different
4609    from results of urs2sww.  This is due to the interpolation
4610    function used, and the different grid structure between urs2sww
4611    and this function.
4612   
4613    Interpolating data that has an underlying gridded source can
4614    easily end up with different values, depending on the underlying
4615    mesh.
4616
4617    consider these 4 points
4618    50  -50
4619
4620    0     0
4621
4622    The grid can be
4623     -
4624    |\|    A
4625     -
4626     or;
4627      -
4628     |/|   B
4629      -
4630      If a point is just below the center of the midpoint, it will have a
4631      +ve value in grid A and a -ve value in grid B.
4632    """ 
4633    from anuga.pmesh.mesh import Mesh, NoTrianglesError
4634
4635    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4636                basename_in + EAST_VELOCITY_LABEL,
4637                basename_in + NORTH_VELOCITY_LABEL]
4638    quantities = ['HA','UA','VA']
4639
4640    # instantiate Urs_points for the three mux files.
4641    mux = {}
4642    for quantity, file in map(None, quantities, files_in):
4643        mux[quantity] = Urs_points(file)
4644       
4645    # Could check that the depth is the same. (hashing)
4646
4647    # handle to a mux file to do depth stuff
4648    a_mux = mux[quantities[0]]
4649   
4650    # Convert to utm
4651    lat = a_mux.lonlatdep[:,1]
4652    long = a_mux.lonlatdep[:,0]
4653    points_utm, zone = convert_from_latlon_to_utm( \
4654        latitudes=lat, longitudes=long)
4655    #print "points_utm", points_utm
4656    #print "zone", zone
4657
4658    elevation = a_mux.lonlatdep[:,2] * -1 #
4659   
4660    # grid ( create a mesh from the selected points)
4661    # This mesh has a problem.  Triangles are stretched over ungridded areas.
4662    #  If these areas could be described as holes in pmesh, that would be great
4663
4664    # I can't just get the user to select a point in the middle.
4665    # A boundary is needed around these points.
4666    # But if the zone of points is obvious enough auto-segment should do
4667    # a good boundary.
4668    mesh = Mesh()
4669    mesh.add_vertices(points_utm)
4670    mesh.auto_segment(smooth_indents=True, expand_pinch=True)
4671    # To try and avoid alpha shape 'hugging' too much
4672    mesh.auto_segment( mesh.shape.get_alpha()*1.1 )
4673    if hole_points_UTM is not None:
4674        point = ensure_absolute(hole_points_UTM)
4675        mesh.add_hole(point[0], point[1])
4676    try:
4677        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4678    except NoTrianglesError:
4679        # This is a bit of a hack, going in and changing the
4680        # data structure.
4681        mesh.holes = []
4682        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4683    mesh_dic = mesh.Mesh2MeshList()
4684
4685    #mesh.export_mesh_file(basename_in + '_168.tsh')
4686    #import sys; sys.exit()
4687    # These are the times of the mux file
4688    mux_times = []
4689    for i in range(a_mux.time_step_count):
4690        mux_times.append(a_mux.time_step * i) 
4691    mux_times_start_i, mux_times_fin_i = mux2sww_time(mux_times, mint, maxt)
4692    times = mux_times[mux_times_start_i:mux_times_fin_i]
4693   
4694    if mux_times_start_i == mux_times_fin_i:
4695        # Close the mux files
4696        for quantity, file in map(None, quantities, files_in):
4697            mux[quantity].close()
4698        msg="Due to mint and maxt there's no time info in the boundary SWW."
4699        raise Exception, msg
4700       
4701    # If this raise is removed there are currently no downstream errors
4702           
4703    points_utm = ensure_numeric(points_utm)
4704    #print "mesh_dic['generatedpointlist']", mesh_dic['generatedpointlist']
4705    #print "points_utm", points_utm
4706    assert allclose(ensure_numeric(mesh_dic['generatedpointlist']),
4707                    ensure_numeric(points_utm))
4708   
4709    volumes = mesh_dic['generatedtrianglelist']
4710   
4711    # write sww intro and grid stuff.   
4712    if basename_out is None:
4713        swwname = basename_in + '.sww'
4714    else:
4715        swwname = basename_out + '.sww'
4716
4717    if verbose: print 'Output to ', swwname
4718    outfile = NetCDFFile(swwname, 'w')
4719    # For a different way of doing this, check out tsh2sww
4720    # work out sww_times and the index range this covers
4721    sww = Write_sww()
4722    sww.store_header(outfile, times, len(volumes), len(points_utm),
4723                     verbose=verbose)
4724    outfile.mean_stage = mean_stage
4725    outfile.zscale = zscale
4726
4727    sww.store_triangulation(outfile, points_utm, volumes,
4728                            elevation, zone,  new_origin=origin,
4729                            verbose=verbose)
4730   
4731    if verbose: print 'Converting quantities'
4732    j = 0
4733    # Read in a time slice from each mux file and write it to the sww file
4734    for ha, ua, va in map(None, mux['HA'], mux['UA'], mux['VA']):
4735        if j >= mux_times_start_i and j < mux_times_fin_i:
4736            stage = zscale*ha + mean_stage
4737            h = stage - elevation
4738            xmomentum = ua*h
4739            ymomentum = -1*va*h # -1 since in mux files south is positive.
4740            sww.store_quantities(outfile, 
4741                                 slice_index=j - mux_times_start_i,
4742                                 verbose=verbose,
4743                                 stage=stage,
4744                                 xmomentum=xmomentum,
4745                                 ymomentum=ymomentum)
4746        j += 1
4747    if verbose: sww.verbose_quantities(outfile)
4748    outfile.close()
4749    #
4750    # Do some conversions while writing the sww file
4751
4752   
4753def mux2sww_time(mux_times, mint, maxt):
4754    """
4755    """
4756
4757    if mint is None:
4758        mux_times_start_i = 0
4759    else:
4760        mux_times_start_i = searchsorted(mux_times, mint)
4761       
4762    if maxt is None:
4763        mux_times_fin_i = len(mux_times)
4764    else:
4765        maxt += 0.5 # so if you specify a time where there is
4766                    # data that time will be included
4767        mux_times_fin_i = searchsorted(mux_times, maxt)
4768
4769    return mux_times_start_i, mux_times_fin_i
4770
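
# A minimal usage sketch for mux2sww_time (an illustrative addition, not
# part of the original module). It relies on searchsorted picking the first
# index with time >= mint, and on the 0.5 s pad making maxt inclusive.
def _example_mux2sww_time():
    mux_times = [0.0, 10.0, 20.0, 30.0]
    start, fin = mux2sww_time(mux_times, mint=5.0, maxt=20.0)
    assert mux_times[start:fin] == [10.0, 20.0]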
4771
4772class Write_sww:
4773    from anuga.shallow_water.shallow_water_domain import Domain
4774
4775    # FIXME (Ole): Hardwiring the conserved quantities like
4776    # this could be a problem. I would prefer taking them from
4777    # the instantiation of Domain.
4778    #
4779    # (DSG) There is not always a Domain instance when Write_sww is used.
4780    # Check to see if this is the same level of hardwiring as is in
4781    # the shallow water domain.
4782   
4783    sww_quantities = Domain.conserved_quantities
4784
4785
4786    RANGE = '_range'
4787    EXTREMA = ':extrema'
4788
4789    def __init__(self):
4790        pass
4791   
4792    def store_header(self,
4793                     outfile,
4794                     times,
4795                     number_of_volumes,
4796                     number_of_points,
4797                     description='Converted from XXX',
4798                     smoothing=True,
4799                     order=1, verbose=False):
4800        """
4801        outfile - handle to an open NetCDF file that will be written to
4802        times - A list of the time slice times OR a start time
4803        Note, if a list is given the info will be made relative.
4804        number_of_volumes - the number of triangles
4805        """
4806   
4807        outfile.institution = 'Geoscience Australia'
4808        outfile.description = description
4809
4810        # For sww compatibility
4811        if smoothing is True:
4812            # Smoothing to be deprecated
4813            outfile.smoothing = 'Yes'
4814            outfile.vertices_are_stored_uniquely = 'False'
4815        else:
4816            # Smoothing to be deprecated
4817            outfile.smoothing = 'No'
4818            outfile.vertices_are_stored_uniquely = 'True'
4819        outfile.order = order
4820
4821        try:
4822            revision_number = get_revision_number()
4823        except:
4824            revision_number = None
4825        # Allow None to be stored as a string               
4826        outfile.revision_number = str(revision_number) 
4827
4828
4829       
4830        # times - A list or array of the time slice times OR a start time
4831        # times = ensure_numeric(times)
4832        # Start time in seconds since the epoch (midnight 1/1/1970)
4833
4834        # This is used to distinguish a single start time (one number)
4835        # from a list/array of time slice times.
4836        if type(times) is list or type(times) is ArrayType: 
4837            number_of_times = len(times)
4838            times = ensure_numeric(times) 
4839            if number_of_times == 0:
4840                starttime = 0
4841            else:
4842                starttime = times[0]
4843                times = times - starttime  #Store relative times
4844        else:
4845            number_of_times = 0
4846            starttime = times
4847            #times = ensure_numeric([])
4848        outfile.starttime = starttime
4849        # dimension definitions
4850        outfile.createDimension('number_of_volumes', number_of_volumes)
4851        outfile.createDimension('number_of_vertices', 3)
4852        outfile.createDimension('numbers_in_range', 2)
4853   
4854        if smoothing is True:
4855            outfile.createDimension('number_of_points', number_of_points)
4856       
4857            # FIXME(Ole): This will cause sww files for parallel domains to
4858            # have ghost nodes stored (but not used by triangles).
4859            # To clean this up, we have to change get_vertex_values and
4860            # friends in quantity.py (but I can't be bothered right now)
4861        else:
4862            outfile.createDimension('number_of_points', 3*number_of_volumes)
4863        outfile.createDimension('number_of_timesteps', number_of_times)
4864
4865        # variable definitions
4866        outfile.createVariable('x', precision, ('number_of_points',))
4867        outfile.createVariable('y', precision, ('number_of_points',))
4868        outfile.createVariable('elevation', precision, ('number_of_points',))
4869        q = 'elevation'
4870        outfile.createVariable(q+Write_sww.RANGE, precision,
4871                               ('numbers_in_range',))
4872
4873
4874        # Initialise ranges with small and large sentinels.
4875        # If this was in pure Python we could have used None sensibly
4876        outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min               
4877        outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
4878
4879        # FIXME: Backwards compatibility
4880        outfile.createVariable('z', precision, ('number_of_points',))
4881        #################################
4882
4883        outfile.createVariable('volumes', Int, ('number_of_volumes',
4884                                                'number_of_vertices'))
4885
4886        outfile.createVariable('time', precision,
4887                               ('number_of_timesteps',))
4888       
4889        for q in Write_sww.sww_quantities:
4890            outfile.createVariable(q, precision,
4891                                   ('number_of_timesteps',
4892                                    'number_of_points')) 
4893            outfile.createVariable(q+Write_sww.RANGE, precision,
4894                                   ('numbers_in_range',))
4895
4896            # Initialise ranges with small and large sentinels.
4897            # If this was in pure Python we could have used None sensibly
4898            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min               
4899            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
4900           
4901        if type(times) is list or type(times) is ArrayType: 
4902            outfile.variables['time'][:] = times    #Store time relative
4903           
4904        if verbose:
4905            print '------------------------------------------------'
4906            print 'Statistics:'
4907            print '    t in [%f, %f], len(t) == %d'\
4908                  %(min(times.flat), max(times.flat), len(times.flat))
4909
4910       
4911    def store_triangulation(self,
4912                            outfile,
4913                            points_utm,
4914                            volumes,
4915                            elevation, zone=None, new_origin=None, 
4916                            points_georeference=None, verbose=False):
4917        """
4918       
4919        new_origin - a georeference that the points can be set to. (Maybe
4920        do this before calling this function.)
4921       
4922        points_utm - currently a list or array of the points in UTM.
4923        points_georeference - the georeference of the points_utm
4924       
4925        How about passing new_origin and current_origin.
4926        If you get both, do a conversion from the old to the new.
4927       
4928        If you only get new_origin, the points are absolute;
4929        convert them to relative.
4930       
4931        If you only get the current_origin, the points are relative; store
4932        them as relative.
4933       
4934        If you get no georefs, create a new georef based on the minimums of
4935        points_utm.  (Another option would be to default to absolute.)
4936       
4937        Yes, and this is done in another part of the code.
4938        Probably geospatial.
4939       
4940        If you don't supply either geo_ref, then supply a zone. If not,
4941        the default zone will be used.
4942       
4943       
4944        Precondition:
4945       
4946        store_header has been called.
4947        """
4948       
4949        number_of_points = len(points_utm)   
4950        volumes = array(volumes) 
4951        points_utm = array(points_utm)
4952
4953        # given the two geo_refs and the points, do the stuff
4954        # described in the method header
4955        # if this is needed else where, pull out as a function
4956        points_georeference = ensure_geo_reference(points_georeference)
4957        new_origin = ensure_geo_reference(new_origin)
4958        if new_origin is None and points_georeference is not None:
4959            points = points_utm
4960            geo_ref = points_georeference
4961        else:
4962            if new_origin is None:
4963                new_origin = Geo_reference(zone,min(points_utm[:,0]),
4964                                           min(points_utm[:,1]))
4965            points = new_origin.change_points_geo_ref(points_utm,
4966                                                      points_georeference)
4967            geo_ref = new_origin
4968
4969        # At this stage I need a georef and points
4970        # the points are relative to the georef
4971        geo_ref.write_NetCDF(outfile)
4972   
4973        # This will put the geo ref in the middle
4974        #geo_ref=Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
4975       
4976        x =  points[:,0]
4977        y =  points[:,1]
4978        z = outfile.variables['z'][:]
4979   
4980        if verbose:
4981            print '------------------------------------------------'
4982            print 'More Statistics:'
4983            print '  Extent (lat/lon):'
4984            print '    x in [%f, %f], len(lat) == %d'\
4985                  %(min(x), max(x),
4986                    len(x))
4987            print '    y in [%f, %f], len(lon) == %d'\
4988                  %(min(y), max(y),
4989                    len(y))
4990            print '    z in [%f, %f], len(z) == %d'\
4991                  %(min(elevation), max(elevation),
4992                    len(elevation))
4993            print 'geo_ref: ',geo_ref
4994            print '------------------------------------------------'
4995           
4996        #z = resize(bath_grid,outfile.variables['z'][:].shape)
4997        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
4998        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
4999        outfile.variables['z'][:] = elevation
5000        outfile.variables['elevation'][:] = elevation  #FIXME HACK
5001        outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
5002
5003        q = 'elevation'
5004        # This updates the _range values
5005        outfile.variables[q+Write_sww.RANGE][0] = min(elevation)
5006        outfile.variables[q+Write_sww.RANGE][1] = max(elevation)
5007
5008
5009    def store_quantities(self, outfile, precision=Float,
5010                         slice_index=None, time=None,
5011                         verbose=False, **quant):
5012        """
5013        Write the quantity info.
5014
5015        **quant holds extra keyword arguments. These must be
5016          the sww quantities, currently: stage, xmomentum, ymomentum.
5017       
5018        If the time array has already been built, use slice_index
5019        to specify the index.
5020       
5021        Otherwise, use time to extend the time dimension.
5022
5023        Maybe make this general, but the viewer assumes these quantities,
5024        so maybe we don't want it general - unless the viewer is general.
5025       
5026        Preconditions:
5027        store_triangulation and
5028        store_header have been called.
5029        """
5030
5031        if time is not None:
5032            file_time = outfile.variables['time']
5033            slice_index = len(file_time)
5034            file_time[slice_index] = time   
5035
5036        # write the conserved quantities from Domain.
5037        # Typically stage,  xmomentum, ymomentum
5038        # other quantities will be ignored, silently.
5039        for q in Write_sww.sww_quantities:
5040            if not quant.has_key(q):
5041                msg = 'SWW file cannot write quantity %s' %q
5042                raise NewQuantity, msg
5043            else:
5044                q_values = quant[q]
5045                outfile.variables[q][slice_index] = q_values.astype(precision)
5046
5047                # This updates the _range values
5048                q_range = outfile.variables[q+Write_sww.RANGE][:]
5049                q_values_min = min(q_values)
5050                if q_values_min < q_range[0]:
5051                    outfile.variables[q+Write_sww.RANGE][0] = q_values_min
5052                q_values_max = max(q_values)
5053                if q_values_max > q_range[1]:
5054                    outfile.variables[q+Write_sww.RANGE][1] = q_values_max
5055
5056    def verbose_quantities(self, outfile):
5057        print '------------------------------------------------'
5058        print 'More Statistics:'
5059        for q in Write_sww.sww_quantities:
5060            print '    %s in [%f, %f]' %(q,
5061                                       outfile.variables[q+Write_sww.RANGE][0],
5062                                       outfile.variables[q+Write_sww.RANGE][1])
5063        print '------------------------------------------------'
5064
5065
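# A minimal end-to-end sketch of the Write_sww protocol (an illustrative
# addition, not part of the original module; file name, mesh and values are
# hypothetical). Note the required call order: store_header, then
# store_triangulation, then store_quantities once per time slice.
def _example_write_sww(swwname='example.sww'):
    outfile = NetCDFFile(swwname, 'w')
    points_utm = array([[0.0, 0.0], [10.0, 0.0], [0.0, 10.0]])
    volumes = [[0, 1, 2]]                    # A single triangle
    elevation = array([-10.0, -10.0, -10.0])
    times = [0.0, 30.0]                      # Two time slices

    sww = Write_sww()
    sww.store_header(outfile, times, len(volumes), len(points_utm))
    sww.store_triangulation(outfile, points_utm, volumes,
                            elevation, zone=56)
    for j in range(len(times)):
        stage = array([0.1, 0.1, 0.1]) * (j+1)
        h = stage - elevation
        sww.store_quantities(outfile, slice_index=j,
                             stage=stage,
                             xmomentum=0.5*h,
                             ymomentum=0.0*h)
    outfile.close()
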
5066       
5067def obsolete_write_sww_time_slices(outfile, has, uas, vas, elevation,
5068                         mean_stage=0, zscale=1,
5069                         verbose=False):   
5070    #Time stepping
5071    stage = outfile.variables['stage']
5072    xmomentum = outfile.variables['xmomentum']
5073    ymomentum = outfile.variables['ymomentum']
5074
5075    n = len(has)
5076    j=0
5077    for ha, ua, va in map(None, has, uas, vas):
5078        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
5079        w = zscale*ha + mean_stage
5080        stage[j] = w
5081        h = w - elevation
5082        xmomentum[j] = ua*h
5083        ymomentum[j] = -1*va*h  # -1 since in mux files south is positive.
5084        j += 1
5085   
5086def urs2txt(basename_in, location_index=None):
5087    """
5088    Not finished or tested
5089    """
5090   
5091    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
5092                basename_in + EAST_VELOCITY_LABEL,
5093                basename_in + NORTH_VELOCITY_LABEL]
5094    quantities = ['HA','UA','VA']
5095
5096    d = ","
5097   
5098    # Instantiate Urs_points for each of the three mux files.
5099    mux = {}
5100    for quantity, file in map(None, quantities, files_in):
5101        mux[quantity] = Urs_points(file)
5102       
5103    # Could check that the depth is the same. (hashing)
5104
5105    # handle to a mux file to do depth stuff
5106    a_mux = mux[quantities[0]]
5107   
5108    # Convert to utm
5109    latitudes = a_mux.lonlatdep[:,1]
5110    longitudes = a_mux.lonlatdep[:,0]
5111    points_utm, zone = convert_from_latlon_to_utm( \
5112        latitudes=latitudes, longitudes=longitudes)
5113    #print "points_utm", points_utm
5114    #print "zone", zone
5115    depths = a_mux.lonlatdep[:,2]  # Depth in metres (positive down)
5116   
5117    fid = open(basename_in + '.txt', 'w')
5118
5119    fid.write("zone: " + str(zone) + "\n")
5120
5121    if location_index is not None:
5122        #Title
5123        li = location_index
5124        fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
5125                  'Northing' + "\n")
5126        fid.write(str(li) +d+ str(latitudes[li])+d+ \
5127              str(longitudes[li]) +d+ str(points_utm[li][0]) +d+ \
5128              str(points_utm[li][1]) + "\n")
5129
5130    # the non-time dependent stuff
5131    #Title
5132    fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
5133                  'Northing' +d+ 'depth m' + "\n")
5134    i = 0
5135    for depth, point_utm, lat, lon in map(None, depths,
5136                                               points_utm, latitudes,
5137                                               longitudes):
5138       
5139        fid.write(str(i) +d+ str(lat)+d+ str(lon) +d+ str(point_utm[0]) +d+ \
5140                  str(point_utm[1]) +d+ str(depth) + "\n")
5141        i += 1
5142    #Time dependent
5143    if location_index is not None:
5144        time_step = a_mux.time_step
5145        i = 0
5146        #Title
5147        fid.write('time' +d+ 'HA depth m'+d+ \
5148                 'UA momentum East x m/sec' +d+ 'VA momentum North y m/sec' \
5149                      + "\n")
5150        for HA, UA, VA in map(None, mux['HA'], mux['UA'], mux['VA']):
5151            fid.write(str(i*time_step) +d+ str(HA[location_index])+d+ \
5152                      str(UA[location_index]) +d+ str(VA[location_index]) \
5153                      + "\n")
5154           
5155            i += 1
5156   
5157class Urs_points:
5158    """
5159    Read the info in URS mux files.
5160
5161    For the quantities, here's a correlation between the file names and
5162    what they mean:
5163    z-mux is height above sea level, m
5164    e-mux is velocity in the Eastern direction, m/s
5165    n-mux is velocity in the Northern direction, m/s
5166    """
5167    def __init__(self,urs_file):
5168        self.iterated = False
5169        columns = 3  # lon, lat, depth
5170        mux_file = open(urs_file, 'rb')
5171       
5172        # Number of points/stations
5173        (self.points_num,)= unpack('i',mux_file.read(4))
5174       
5175        # nt, int - Number of time steps
5176        (self.time_step_count,)= unpack('i',mux_file.read(4))
5177        #print "self.time_step_count", self.time_step_count
5178        #dt, float - time step, seconds
5179        (self.time_step,) = unpack('f', mux_file.read(4))
5180        #print "self.time_step", self.time_step
5181        msg = "Bad data in the urs file."
5182        if self.points_num < 0:
5183            mux_file.close()
5184            raise ANUGAError, msg
5185        if self.time_step_count < 0:
5186            mux_file.close()
5187            raise ANUGAError, msg
5188        if self.time_step < 0:
5189            mux_file.close()
5190            raise ANUGAError, msg
5191
5192        # The depth is in metres; it is the distance from the ocean
5193        # surface to the sea bottom.
5194        lonlatdep = p_array.array('f')
5195        lonlatdep.read(mux_file, columns * self.points_num)
5196        lonlatdep = array(lonlatdep, typecode=Float)   
5197        lonlatdep = reshape(lonlatdep, (self.points_num, columns))
5198        #print 'lonlatdep',lonlatdep
5199        self.lonlatdep = lonlatdep
5200       
5201        self.mux_file = mux_file
5202        # check this array
5203
5204    def __iter__(self):
5205        """
5206        iterate over quantity data which is with respect to time.
5207
5208        Note: You can only interate once over an object
5209       
5210        returns quantity infomation for each time slice
5211        """
5212        msg = "You can only iterate once over a urs file."
5213        assert not self.iterated, msg
5214        self.iter_time_step = 0
5215        self.iterated = True
5216        return self
5217   
5218    def next(self):
5219        if self.time_step_count == self.iter_time_step:
5220            self.close()
5221            raise StopIteration
5222        #Read in a time slice  from mux file 
5223        hz_p_array = p_array.array('f')
5224        hz_p_array.read(self.mux_file, self.points_num)
5225        hz_p = array(hz_p_array, typecode=Float)
5226        self.iter_time_step += 1
5227       
5228        return hz_p
5229
5230    def close(self):
5231        self.mux_file.close()
5232       
5233    #### END URS UNGRIDDED 2 SWW ###
5234
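# A minimal usage sketch for Urs_points (an illustrative addition; the file
# name is hypothetical). Iterating yields one array of values per time step;
# the file is closed automatically when iteration finishes.
def _example_read_mux(filename='example-z-mux'):
    mux = Urs_points(filename)
    print 'points: %d, timesteps: %d' %(mux.points_num, mux.time_step_count)
    for i, hz in enumerate(mux):
        print 'time %f: first value %f' %(i*mux.time_step, hz[0])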
5235       
5236def start_screen_catcher(dir_name, myid='', numprocs='', extra_info='',
5237                         print_to_screen=False, verbose=False):
5238    """
5239    Used to store screen output and errors to file, if run on multiple
5240    processes eachprocessor will have its own output and error file.
5241   
5242    extra_info - is used as a string that can identify outputs with another
5243    string eg. '_other'
5244    """
5245    import sys
5246
5247    if access(dir_name,W_OK) == 0:
5248        if verbose: print 'Make directory %s' %dir_name
5249        if verbose: print "myid", myid
5250        mkdir (dir_name,0777)
5251    if myid != '':
5252        myid = '_'+str(myid)
5253    if numprocs != '':
5254        numprocs = '_'+str(numprocs)
5255    if extra_info != '':
5256        extra_info = '_'+str(extra_info)
5257    screen_output_name = dir_name + "screen_output%s%s%s.txt" %(myid,
5258                                                                numprocs,
5259                                                                extra_info)
5260    screen_error_name = dir_name + "screen_error%s%s%s.txt" %(myid,
5261                                                              numprocs,
5262                                                              extra_info)
5263    print screen_output_name
5264    #used to catch screen output to file
5265    sys.stdout = Screen_Catcher(screen_output_name)
5266    sys.stderr = Screen_Catcher(screen_error_name)
5267
5268class Screen_Catcher:
5269    """this simply catches the screen output and stores it to file defined by
5270    start_screen_catcher (above)
5271    """
5272   
5273    def __init__(self, filename):
5274        self.filename = filename
5275 
5276        if exists(self.filename):
5277            print 'Old existing file "%s" has been deleted' %(self.filename)
5278            remove(self.filename)
5279
5280    def write(self, stuff):
5281        fid = open(self.filename, 'a')
5282        fid.write(stuff)
        fid.close()  # Close explicitly so output is flushed per write
5283#        if print_to_screen: print stuff
5284
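# A minimal usage sketch (illustrative; directory and process numbers are
# hypothetical). After the call, print statements and tracebacks go to
# per-process files:
#
#     start_screen_catcher('output' + sep, myid=0, numprocs=4)
#     print 'hello'   # appended to output/screen_output_0_4.txt
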
5285def copy_code_files(dir_name, filename1, filename2):
5286    """Copies "filename1" and "filename2" to "dir_name". Very useful for
5287    information management
5288    filename1 and filename2 are both absolute pathnames   
5289    """
5290
5291    if access(dir_name,F_OK) == 0:
5292        print 'Make directory %s' %dir_name
5293        mkdir (dir_name,0777)
5294    shutil.copy(filename1, dir_name + sep + basename(filename1))
5295    shutil.copy(filename2, dir_name + sep + basename(filename2))
5296#    copy (__file__, project.output_run_time_dir + basename(__file__))
5297    print 'Files %s and %s copied' %(filename1, filename2)
5298
5299def get_data_from_file(filename,separator_value = ','):
5300    """
5301    Read in data information from file and
5302   
5303    Returns:
5304        header_fields, a string? of the first line separated
5305        by the 'separator_value'
5306       
5307        data, a array (N data columns X M lines) in the file
5308        excluding the header
5309       
5310    NOTE: wont deal with columns with different lenghts and there must be
5311    no blank lines at the end.
5312    """
5313   
5314    fid = open(filename)
5315    lines = fid.readlines()
5316   
5317    fid.close()
5318   
5319    header_line = lines[0]
5320    header_fields = header_line.split(separator_value)
5321
5322    #array to store data; typecode=Float so values are stored as floats
5323    #(there's probably a better way to do this)
5324    data=array([],typecode=Float)
5325    data=resize(data,((len(lines)-1),len(header_fields)))
5326#    print 'number of fields',range(len(header_fields))
5327#    print 'number of lines',len(lines), shape(data)
5328#    print'data',data[1,1],header_line
5329
5330    array_number = 0
5331    line_number = 1
5332    while line_number < (len(lines)):
5333        # The header is line 0, so data starts at line_number 1;
5334        # array_number is the corresponding row index into the data array.
5335        fields = lines[line_number].split(separator_value)
5336        for i in range(len(header_fields)): 
5337            #assign to array
5338            data[array_number,i] = float(fields[i])
5339           
5340        line_number = line_number +1
5341        array_number = array_number +1
5342       
5343    return header_fields, data
5344
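# A minimal usage sketch for get_data_from_file (illustrative; the file name
# and contents are hypothetical). For a file 'gauges.csv' containing
#
#     time,stage
#     0.0,1.5
#     30.0,1.8
#
# the call below returns header_fields ['time', 'stage\n'] (note the header
# keeps its trailing newline) and a 2x2 Float array:
#
#     header_fields, data = get_data_from_file('gauges.csv')
#     print data[1,1]    # -> 1.8
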
5345def store_parameters(verbose=False,**kwargs):
5346    """
5347    Store "kwargs" into a temp csv file, if "completed" is a kwargs csv file is
5348    kwargs[file_name] else it is kwargs[output_dir] + details_temp.csv
5349   
5350    Must have a file_name keyword arg, this is what is writing to.
5351    might be a better way to do this using CSV module Writer and writeDict
5352   
5353    writes file to "output_dir" unless "completed" is in kwargs, then
5354    it writes to "file_name" kwargs
5355
5356    """
5357    import types
5358#    import os
5359   
5360    # Check that kwargs is a dictionary
5361    if type(kwargs) != types.DictType:
5362        raise TypeError
5363   
5364    #is 'completed' in kwargs?
5365    if kwargs.has_key('completed'):
5366        completed = True
5367    else:
5368        completed = False
5369
5370 
5371    #get the file name and assert that a file_name exists
5372    if completed:
5373        try:
5374            file = str(kwargs['file_name'])
5375        except KeyError:
5376            raise Exception, 'kwargs must have file_name'
5377    else:
5378        #write temp file in output directory
5379        try:
5380            file = str(kwargs['output_dir'])+'detail_temp.csv'
5381        except KeyError:
5382            raise Exception, 'kwargs must have output_dir'
5383       
5384    #extracts the header info and the new line info
5385    line=''
5386    header=''
5387    count=0
5388    keys = kwargs.keys()
5389    keys.sort()
5390   
5391    #used the sorted keys to create the header and line data
5392    for k in keys:
5393#        print "%s = %s" %(k, kwargs[k])
5394        header = header+str(k)
5395        line = line+str(kwargs[k])
5396        count+=1
5397        if count < len(kwargs):
5398            header = header+','
5399            line = line+','
5400    header+='\n'
5401    line+='\n'
5402
5403    # Check the header info: if it matches, append; if not, create a new file
5404    # (try to open the file first)
5405    try:
5406        fid = open(file,"r")
5407        file_header=fid.readline()
5408        fid.close()
5409        if verbose: print 'read file header %s' %file_header
5410       
5411    except:
5412        msg = 'Try to create new file %s' %file
5413        if verbose: print msg
5414        #tries to open file, maybe directory is bad
5415        try:
5416            fid = open(file,"w")
5417            fid.write(header)
5418            fid.close()
5419            file_header=header
5420        except:
5421            msg = 'Cannot create new file %s' %file
5422            raise IOError, msg
5423           
5424    #if header is same or this is a new file
5425    if file_header==str(header):
5426        fid=open(file,"a")
5427        #write new line
5428        fid.write(line)
5429        fid.close()
5430    else:
5431        #backup plan:
5432        # if the header is different and 'completed' is set, append the info
5433        # to the end of the detail_temp.csv file in the output directory
5434        file = str(kwargs['output_dir'])+'detail_temp.csv'
5435        fid=open(file,"a")
5436        fid.write(header)
5437        fid.write(line)
5438        fid.close()
5439        if verbose: print 'file',file_header.strip('\n')
5440        if verbose: print 'head',header.strip('\n')
5441        if file_header.strip('\n')==str(header): print 'they equal'
5442        msg = 'WARNING: File header does not match input info; the input variables have changed. Suggest changing the file name.'
5443        print msg
5444
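# A minimal usage sketch for store_parameters (illustrative; names and values
# are hypothetical). Every keyword, including output_dir or file_name, is
# written as a csv column:
#
#     store_parameters(output_dir='runs/', run_name='test1', cpus=4)
#     # ... appends a row to runs/detail_temp.csv
#     store_parameters(file_name='runs/details.csv', run_name='test1',
#                      cpus=4, completed=True)
#     # ... appends a row to runs/details.csv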
5445
5446
5447
5448
5449def get_maximum_inundation_elevation(filename,
5450                                     polygon=None,
5451                                     time_interval=None,
5452                                     verbose=False):
5453   
5454    """Return highest elevation where depth > 0
5455   
5456    Usage:
5457    max_runup = get_maximum_inundation_elevation(filename,
5458                                                 polygon=None,
5459                                                 time_interval=None,
5460                                                 verbose=False)
5461
5462    filename is a NetCDF sww file containing ANUGA model output.   
5463    Optional arguments polygon and time_interval restrict the maximum
5464    runup calculation
5465    to points that lie within the specified polygon and time interval.
5466
5467    If no inundation is found within polygon and time_interval the return value
5468    is None signifying "No Runup" or "Everything is dry".
5469
5470    See general function get_maximum_inundation_data for details.
5471   
5472    """
5473   
5474    runup, _ = get_maximum_inundation_data(filename,
5475                                           polygon=polygon,
5476                                           time_interval=time_interval,
5477                                           verbose=verbose)
5478    return runup
5479
5480
5481
5482
5483def get_maximum_inundation_location(filename,
5484                                    polygon=None,
5485                                    time_interval=None,
5486                                    verbose=False):
5487    """Return location of highest elevation where h > 0
5488   
5489   
5490    Usage:
5491    max_runup_location = get_maximum_inundation_location(filename,
5492                                                         polygon=None,
5493                                                         time_interval=None,
5494                                                         verbose=False)
5495
5496    filename is a NetCDF sww file containing ANUGA model output.
5497    Optional arguments polygon and time_interval restrict the maximum
5498    runup calculation
5499    to points that lie within the specified polygon and time interval.
5500
5501    If no inundation is found within polygon and time_interval the return value
5502    is None signifying "No Runup" or "Everything is dry".
5503
5504    See general function get_maximum_inundation_data for details.
5505    """
5506   
5507    _, max_loc = get_maximum_inundation_data(filename,
5508                                             polygon=polygon,
5509                                             time_interval=time_interval,
5510                                             verbose=verbose)
5511    return max_loc
5512   
5513
5514
5515def get_maximum_inundation_data(filename, polygon=None, time_interval=None,
5516                                use_centroid_values=False,
5517                                verbose=False):
5518    """Compute maximum run up height from sww file.
5519
5520
5521    Usage:
5522    runup, location = get_maximum_inundation_data(filename,
5523                                                  polygon=None,
5524                                                  time_interval=None,
5525                                                  verbose=False)
5526   
5527
5528    Algorithm is as in get_maximum_inundation_elevation from
5529    shallow_water_domain
5530    except that this function works with the sww file and computes the maximal
5531    runup height over multiple timesteps.
5532   
5533    Optional arguments polygon and time_interval restrict the
5534    maximum runup calculation
5535    to points that lie within the specified polygon and time interval.
5536    Polygon is
5537    assumed to be in (absolute) UTM coordinates in the same zone as domain.
5538
5539    If no inundation is found within polygon and time_interval the return value
5540    is None signifying "No Runup" or "Everything is dry".
5541    """
5542
5543    # We are using nodal values here as that is what is stored in sww files.
5544
5545    # Water depth below which it is considered to be 0 in the model
5546    # FIXME (Ole): Allow this to be specified as a keyword argument as well
5547
5548    from anuga.utilities.polygon import inside_polygon   
5549    from anuga.config import minimum_allowed_height
5550    from Scientific.IO.NetCDF import NetCDFFile
5551
5552    dir, base = os.path.split(filename)
5553           
5554    iterate_over = get_all_swwfiles(dir,base)
5555   
5556    # Read sww file
5557    if verbose: 
5558        print 'Reading from %s' %filename
5559        # FIXME: Use general swwstats (when done)
5560   
5561    maximal_runup = None
5562    maximal_runup_location = None
5563   
5564    for i, swwfile in enumerate(iterate_over):
5565       
5566        # Read sww file
5567        filename = join(dir,swwfile+'.sww')
5568       
5569        if verbose: 
5570            print 'Reading from %s' %filename
5571            # FIXME: Use general swwstats (when done)
5572               
5573        fid = NetCDFFile(filename)
5574   
5575        # Get geo_reference
5576        # sww files don't have to have a geo_ref
5577        try:
5578            geo_reference = Geo_reference(NetCDFObject=fid)
5579        except AttributeError, e:
5580            geo_reference = Geo_reference() # Default georef object
5581           
5582        xllcorner = geo_reference.get_xllcorner()
5583        yllcorner = geo_reference.get_yllcorner()
5584        zone = geo_reference.get_zone()
5585       
5586        # Get extent
5587        volumes = fid.variables['volumes'][:]   
5588        x = fid.variables['x'][:] + xllcorner
5589        y = fid.variables['y'][:] + yllcorner
5590   
5591   
5592        # Get the relevant quantities
5593        elevation = fid.variables['elevation'][:] 
5594        stage = fid.variables['stage'][:]
5595   
5596   
5597        # Here's where one could convert nodal information to centroid
5598        # information
5599        # but is probably something we need to write in C.
5600        # Here's a Python thought which is NOT finished!!!
5601        if use_centroid_values is True:
5602            x = get_centroid_values(x, volumes)
5603            y = get_centroid_values(y, volumes)   
5604            elevation = get_centroid_values(elevation, volumes)   
5605   
5606   
5607        # Spatial restriction
5608        if polygon is not None:
5609            msg = 'polygon must be a sequence of points.'
5610            assert len(polygon[0]) == 2, msg
5611            # FIXME (Ole): Make a generic polygon input check in polygon.py
5612            # and call it here
5613           
5614            points = concatenate((x[:,NewAxis], y[:,NewAxis]), axis=1)
5615   
5616            point_indices = inside_polygon(points, polygon)
5617   
5618            # Restrict quantities to polygon
5619            elevation = take(elevation, point_indices)
5620            stage = take(stage, point_indices, axis=1)
5621   
5622            # Get info for location of maximal runup
5623            points_in_polygon = take(points, point_indices)
5624            x = points_in_polygon[:,0]
5625            y = points_in_polygon[:,1]       
5626        else:
5627            # Take all points
5628            point_indices = arange(len(x))
5629           
5630   
5631        # Temporal restriction
5632        time = fid.variables['time'][:]
5633        all_timeindices = arange(len(time))       
5634        if time_interval is not None:
5635           
5636            msg = 'time_interval must be a sequence of length 2.'
5637            assert len(time_interval) == 2, msg
5638            msg = 'time_interval %s must not be decreasing.' %(time_interval)
5639            assert time_interval[1] >= time_interval[0], msg
5640           
5641            msg = 'Specified time interval [%.8f:%.8f]' %tuple(time_interval)
5642            msg += ' does not overlap the model time interval: [%.8f, %.8f]\n'\
5643                   %(time[0], time[-1])
5644            if time_interval[1] < time[0]: raise ValueError(msg)
5645            if time_interval[0] > time[-1]: raise ValueError(msg)
5646   
5647            # Take time indices corresponding to interval (& is bitwise AND)
5648            timesteps = compress((time_interval[0] <= time) & (time <= time_interval[1]),
5649                                 all_timeindices)
5650   
5651   
5652            msg = 'time_interval %s did not include any model timesteps.' %(time_interval)       
5653            assert not alltrue(timesteps == 0), msg
5654   
5655   
5656        else:
5657            # Take them all
5658            timesteps = all_timeindices
5659       
5660   
5661        fid.close()
5662   
5663        # Compute maximal runup for each timestep
5664        #maximal_runup = None
5665        #maximal_runup_location = None
5666        #maximal_runups = [None]
5667        #maximal_runup_locations = [None]
5668       
5669        for i in timesteps:
5670            if use_centroid_values is True:
5671                stage_i = get_centroid_values(stage[i,:], volumes)   
5672            else:
5673                stage_i = stage[i,:]
5674               
5675            depth = stage_i  - elevation
5676       
5677            # Get wet nodes i.e. nodes with depth>0 within given region and timesteps
5678            wet_nodes = compress(depth > minimum_allowed_height, arange(len(depth)))
5679   
5680            if alltrue(wet_nodes == 0):
5681                runup = None
5682            else:   
5683                # Find maximum elevation among wet nodes
5684                wet_elevation = take(elevation, wet_nodes)
5685   
5686                runup_index = argmax(wet_elevation)
5687                runup = max(wet_elevation)
5688                assert wet_elevation[runup_index] == runup # Must always be True
5689            if runup > maximal_runup:
5690                maximal_runup = runup      # This works even if maximal_runup is None
5691                #print "NEW RUNUP",runup
5692   
5693                # Record location
5694                wet_x = take(x, wet_nodes)
5695                wet_y = take(y, wet_nodes)           
5696                maximal_runup_location = [wet_x[runup_index], wet_y[runup_index]]
5697   
5698    #print 'maximal_runup, maximal_runup_location',maximal_runup, maximal_runup_location
5699    return maximal_runup, maximal_runup_location
5700
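# A minimal usage sketch (illustrative; the file name, polygon and interval
# are hypothetical). Restrict the runup search to a UTM polygon and to the
# first ten minutes of the simulation:
#
#     runup, location = get_maximum_inundation_data(
#                           'runup.sww',
#                           polygon=[[0,0], [1000,0], [1000,1000], [0,1000]],
#                           time_interval=[0, 600])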
5701def get_all_swwfiles(look_in_dir='',base_name='',verbose=False):
5702    '''
5703    Finds all the sww files in "look_in_dir" whose names contain "base_name".
5704    Will accept base_name with or without the extension '.sww'.
5705   
5706    Returns: a list of strings
5707       
5708    Usage:     iterate_over = get_all_swwfiles(dir, name)
5709    then
5710               for swwfile in iterate_over:
5711                   do stuff
5712                   
5713    Check "export_grids" and "get_maximum_inundation_data" for examples
5714    '''
5715   
5716    #plus tests the extension
5717    name, extension = os.path.splitext(base_name)
5718
5719    if extension != '' and extension != '.sww':
5720        msg = 'file %s %s must be a NetCDF sww file!' %(base_name, extension)
5721        raise IOError, msg
5722
5723    if look_in_dir == "":
5724        look_in_dir = "." # Unix compatibility
5725   
5726    dir_ls = os.listdir(look_in_dir)
5727    #print 'dir_ls',dir_ls, base
5728    iterate_over = [x[:-4] for x in dir_ls if name in x and x[-4:] == '.sww']
5729    if len(iterate_over) == 0:
5730        msg = 'No files with the base name %s'\
5731              %(name)
5732        raise IOError, msg
5733    if verbose: print 'iterate over %s' %(iterate_over)
5734
5735    return iterate_over
5736
5737def get_all_files_with_extension(look_in_dir='',base_name='',extension='.sww',verbose=False):
5738    '''
5739    Finds all the files with the given "extension" in "look_in_dir" whose
5740    names contain "base_name".
5741   
5742    Returns: a list of strings
5743       
5744    Usage:     iterate_over = get_all_files_with_extension(dir, name, '.sww')
5745    then
5746               for swwfile in iterate_over:
5747                   do stuff
5748                   
5749    Check "export_grids" and "get_maximum_inundation_data" for examples
5750    '''
5751   
5752    #plus tests the extension
5753    name, ext = os.path.splitext(base_name)
5754#    print 'look_in_dir',look_in_dir
5755
5756    if ext != '' and ext != extension:
5757        msg = 'base_name %s must be a file with %s extension!' %(base_name, extension)
5758        raise IOError, msg
5759
5760    if look_in_dir == "":
5761        look_in_dir = "." # Unix compatibility
5762#    print 'look_in_dir',look_in_dir, getcwd()
5763    dir_ls = os.listdir(look_in_dir)
5764    #print 'dir_ls',dir_ls, base_name
5765    iterate_over = [x[:-4] for x in dir_ls if name in x and x[-4:] == extension]
5766    if len(iterate_over) == 0:
5767        msg = 'No files with the base name %s in %s'\
5768              %(name, look_in_dir)
5769        raise IOError, msg
5770    if verbose: print 'iterate over %s' %(iterate_over)
5771
5772    return iterate_over
5773
5774def get_all_directories_with_name(look_in_dir='',base_name='',verbose=False):
5775    '''
5776    Finds all the directories in "look_in_dir" whose names contain
5777    "base_name".
5778   
5779    Returns: a list of strings
5780       
5781    Usage:     iterate_over = get_all_directories_with_name(dir, name)
5782    then
5783               for dirname in iterate_over:
5784                   do stuff
5785                   
5786    Check "export_grids" and "get_maximum_inundation_data" for examples
5787    '''
5788   
5789
5790
5791    if look_in_dir == "":
5792        look_in_dir = "." # Unix compatibility
5793#    print 'look_in_dir',look_in_dir
5794    dir_ls = os.listdir(look_in_dir)
5795#    print 'dir_ls',dir_ls
5796    iterate_over = [x for x in dir_ls if base_name in x]
5797    if len(iterate_over) == 0:
5798        msg = 'No directories with the base name %s'\
5799              %(base_name)
5800        raise IOError, msg
5801    if verbose: print 'iterate over %s' %(iterate_over)
5802
5803    return iterate_over
5804
5805
5806#-------------------------------------------------------------
5807if __name__ == "__main__":
5808    # Set umask from config to force the same permissions for all files and
5809    # directories created. (It was noticed that "mpirun" doesn't honour the
5810    # umask set in your .bashrc etc file.)
5811    from config import umask
5812    import os 
5813    os.umask(umask)
5814