source: anuga_core/source/anuga/shallow_water/data_manager.py @ 4824

Last change on this file since 4824 was 4824, checked in by ole, 17 years ago

Beginnings of differentiation between xllcorner and xllcenter. Not yet finished.

File size: 189.2 KB
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.csv: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdini's ascii geometry format (?)
27
28
29A typical dataflow can be described as follows:
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
34NC:           Model outputs for use as boundary conditions (e.g. from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEMs to native pts file
40
41NC -> SWW: Conversion of MOST boundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simulation using abstract_2d_finite_volumes
48
49"""
50
51import exceptions
52class TitleValueError(exceptions.Exception): pass
53class DataMissingValuesError(exceptions.Exception): pass
54class DataFileNotOpenError(exceptions.Exception): pass
55class DataTimeError(exceptions.Exception): pass
56class DataDomainError(exceptions.Exception): pass
57class NewQuantity(exceptions.Exception): pass
58
59
60
61import csv
62import os, sys
63import shutil
64from struct import unpack
65import array as p_array
66#import time, os
67from os import sep, path, remove, mkdir, access, F_OK, W_OK, getcwd
68
69
70from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
71     searchsorted, zeros, allclose, around, reshape, transpose, sort, \
72     NewAxis, ArrayType, compress, take, arange, argmax, alltrue, shape
73
74import string
75
76from Scientific.IO.NetCDF import NetCDFFile
77#from shutil import copy
78from os.path import exists, basename, join
79from os import getcwd
80
81
82from anuga.coordinate_transforms.redfearn import redfearn, \
83     convert_from_latlon_to_utm
84from anuga.coordinate_transforms.geo_reference import Geo_reference, \
85     write_NetCDF_georeference, ensure_geo_reference
86from anuga.geospatial_data.geospatial_data import Geospatial_data,\
87     ensure_absolute
88from anuga.config import minimum_storable_height as default_minimum_storable_height
89from anuga.config import max_float
90from anuga.utilities.numerical_tools import ensure_numeric,  mean
91from anuga.caching.caching import myhash
92from anuga.utilities.anuga_exceptions import ANUGAError
93from anuga.shallow_water import Domain
94from anuga.abstract_2d_finite_volumes.pmesh2domain import \
95     pmesh_to_domain_instance
96from anuga.abstract_2d_finite_volumes.util import get_revision_number, \
97     remove_lone_verts, sww2timeseries, get_centroid_values
98from anuga.load_mesh.loadASCII import export_mesh_file
99# formula mappings
100
101quantity_formula = {'momentum':'(xmomentum**2 + ymomentum**2)**0.5',
102                    'depth':'stage-elevation',
103                    'speed': \
104 '(xmomentum**2 + ymomentum**2)**0.5/(stage-elevation+1.e-6/(stage-elevation))'}
105
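# A minimal sketch showing how these formula strings can be evaluated
# against Numeric arrays. The names stage, elevation, xmomentum and
# ymomentum must be bound in the evaluating scope; the values below are
# made up for illustration:
def _example_evaluate_quantity_formula():
    stage = array([1.0, 2.0])
    elevation = array([0.5, 0.5])
    xmomentum = array([0.3, 0.4])
    ymomentum = array([0.4, 0.3])
    depth = eval(quantity_formula['depth'])        # stage - elevation
    momentum = eval(quantity_formula['momentum'])  # absolute momentum
    return depth, momentum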
106
107   
108def make_filename(s):
109    """Transform argument string into a Sexsuitable filename
110    """
111
112    s = s.strip()
113    s = s.replace(' ', '_')
114    s = s.replace('(', '')
115    s = s.replace(')', '')
116    s = s.replace('__', '_')
117
118    return s
119
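# A hedged usage sketch for make_filename (input string made up):
#
#     >>> make_filename(' Run 1 (final) ')
#     'Run_1_final'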
120
121def check_dir(path, verbose=None):
122    """Check that specified path exists.
123    If path does not exist it will be created if possible
124
125    USAGE:
126       check_dir(path, verbose)
127
128    ARGUMENTS:
129        path -- Directory
130        verbose -- Flag verbose output (default: None)
131
132    RETURN VALUE:
133        Verified path including trailing separator
134
135    """
136
137    import os.path
138
139    if sys.platform in ['nt', 'dos', 'win32']:  # FIXME: other Windows platforms?
140        unix = 0
141    else:
142        unix = 1
143
144
145    if path[-1] != os.sep:
146        path = path + os.sep  # Add separator for directories
147
148    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
149    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
150        try:
151            exitcode=os.mkdir(path)
152
153            # Change access rights if possible
154            #
155            if unix:
156                exitcode=os.system('chmod 775 '+path)
157            else:
158                pass  # FIXME: What about access rights under Windows?
159
160            if verbose: print 'MESSAGE: Directory', path, 'created.'
161
162        except:
163            print 'WARNING: Directory', path, 'could not be created.'
164            if unix:
165                path = '/tmp/'
166            else:
167                path = 'C:'
168
169            print 'Using directory %s instead' %path
170
171    return(path)
172
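# A hedged usage sketch for check_dir (directory name made up); the
# returned path always carries a trailing separator:
#
#     datadir = check_dir('~/anuga_output')   # e.g. '/home/user/anuga_output/'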
173
174
175def del_dir(path):
176    """Recursively delete directory path and all its contents
177    """
178
179    import os
180
181    if os.path.isdir(path):
182        for file in os.listdir(path):
183            X = os.path.join(path, file)
184
185
186            if os.path.isdir(X) and not os.path.islink(X):
187                del_dir(X)
188            else:
189                try:
190                    os.remove(X)
191                except:
192                    print "Could not remove file %s" %X
193
194        os.rmdir(path)
195       
196       
197# ANOTHER OPTION, IF NEEDED IN THE FUTURE, Nick B 7/2007
198def rmgeneric(path, __func__,verbose=False):
199    ERROR_STR= """Error removing %(path)s, %(error)s """
200
201    try:
202        __func__(path)
203        if verbose: print 'Removed ', path
204    except OSError, (errno, strerror):
205        print ERROR_STR % {'path' : path, 'error': strerror }
206           
207def removeall(path,verbose=False):
208
209    if not os.path.isdir(path):
210        return
211   
212    files=os.listdir(path)
213
214    for x in files:
215        fullpath=os.path.join(path, x)
216        if os.path.isfile(fullpath):
217            f=os.remove
218            rmgeneric(fullpath, f)
219        elif os.path.isdir(fullpath):
220            removeall(fullpath, verbose)
221            f=os.rmdir
222            rmgeneric(fullpath, f,verbose)
223
224
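# A hedged sketch contrasting the two deletion helpers above (paths made up):
#
#     del_dir('/tmp/old_run')      # remove directory tree, including the root dir
#     removeall('/tmp/old_run')    # empty the tree but keep the root dir itself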
225
226def create_filename(datadir, filename, format, size=None, time=None):
227
228    import os
229    #from anuga.config import data_dir
230
231    FN = check_dir(datadir) + filename
232
233    if size is not None:
234        FN += '_size%d' %size
235
236    if time is not None:
237        FN += '_time%.2f' %time
238
239    FN += '.' + format
240    return FN
241
242
243def get_files(datadir, filename, format, size):
244    """Get all file (names) with given name, size and format
245    """
246
247    import glob
248
249    import os
250    #from anuga.config import data_dir
251
252    dir = check_dir(datadir)
253
254    pattern = dir + filename + '_size%d*.%s' %(size, format)  # dir already has trailing sep; match create_filename's naming
255    return glob.glob(pattern)
256
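# A hedged sketch of the create_filename/get_files round trip (directory
# and file names made up):
def _example_create_filename():
    FN = create_filename('.', 'myrun', 'sww', size=4, time=1.5)
    # FN == './myrun_size4_time1.50.sww'
    return get_files('.', 'myrun', 'sww', size=4)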
257
258
259#Generic class for storing output to e.g. visualisation or checkpointing
260class Data_format:
261    """Generic interface to data formats
262    """
263
264
265    def __init__(self, domain, extension, mode = 'w'):
266        assert mode in ['r', 'w', 'a'], "Mode %s must be one of " %mode +\
267                                        "'w' (write), " +\
268                                        "'r' (read) or " +\
269                                        "'a' (append)"
270
271        #Create filename
272        self.filename = create_filename(domain.get_datadir(),
273                                        domain.get_name(), extension)
274
275        #print 'F', self.filename
276        self.timestep = 0
277        self.domain = domain
278       
279
280
281        # Exclude ghosts in case this is a parallel domain
282        self.number_of_nodes = domain.number_of_full_nodes       
283        self.number_of_volumes = domain.number_of_full_triangles
284        #self.number_of_volumes = len(domain)       
285
286
287
288
289        #FIXME: Should we have a general set_precision function?
290
291
292
293#Class for storing output to e.g. visualisation
294class Data_format_sww(Data_format):
295    """Interface to native NetCDF format (.sww) for storing model output
296
297    There are two kinds of data
298
299    1: Constant data: Vertex coordinates and field values. Stored once
300    2: Variable data: Conserved quantities. Stored once per timestep.
301
302    All data is assumed to reside at vertex locations.
303    """
304
305
306    def __init__(self, domain, mode = 'w',\
307                 max_size = 2000000000,
308                 recursion = False):
309        from Scientific.IO.NetCDF import NetCDFFile
310        from Numeric import Int, Float, Float32
311
312        self.precision = Float32 #Use single precision for quantities
313        if hasattr(domain, 'max_size'):
314            self.max_size = domain.max_size #file size max is 2Gig
315        else:
316            self.max_size = max_size
317        self.recursion = recursion
318        self.mode = mode
319
320        Data_format.__init__(self, domain, 'sww', mode)
321
322        if hasattr(domain, 'minimum_storable_height'):
323            self.minimum_storable_height = domain.minimum_storable_height
324        else:
325            self.minimum_storable_height = default_minimum_storable_height
326
327        # NetCDF file definition
328        fid = NetCDFFile(self.filename, mode)
329
330        if mode == 'w':
331            description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'
332            self.writer = Write_sww()
333            self.writer.store_header(fid,
334                                     domain.starttime,
335                                     self.number_of_volumes,
336                                     self.domain.number_of_full_nodes,
337                                     description=description,
338                                     smoothing=domain.smooth,
339                                     order=domain.default_order)
340
341            # Extra optional information
342            if hasattr(domain, 'texture'):
343                fid.texture = domain.texture
344
345            if domain.quantities_to_be_monitored is not None:
346                fid.createDimension('singleton', 1)
347                fid.createDimension('two', 2)               
348
349                poly = domain.monitor_polygon
350                if poly is not None:
351                    N = len(poly)
352                    fid.createDimension('polygon_length', N)
353                    fid.createVariable('extrema.polygon',
354                                       self.precision,
355                                       ('polygon_length',
356                                        'two'))
357                    fid.variables['extrema.polygon'][:] = poly                                   
358
359                   
360                interval = domain.monitor_time_interval
361                if interval is not None:
362                    fid.createVariable('extrema.time_interval',
363                                       self.precision,
364                                       ('two',))
365                    fid.variables['extrema.time_interval'][:] = interval
366
367               
368                for q in domain.quantities_to_be_monitored:
369                    #print 'doing', q
370                    fid.createVariable(q+'.extrema', self.precision,
371                                       ('numbers_in_range',))
372                    fid.createVariable(q+'.min_location', self.precision,
373                                       ('numbers_in_range',))
374                    fid.createVariable(q+'.max_location', self.precision,
375                                       ('numbers_in_range',))
376                    fid.createVariable(q+'.min_time', self.precision,
377                                       ('singleton',))
378                    fid.createVariable(q+'.max_time', self.precision,
379                                       ('singleton',))
380
381                   
382        fid.close()
383
384
385    def store_connectivity(self):
386        """Specialisation of store_connectivity for net CDF format
387
388        Writes x,y,z coordinates of triangles constituting
389        the bed elevation.
390        """
391
392        from Scientific.IO.NetCDF import NetCDFFile
393
394        from Numeric import concatenate, Int
395
396        domain = self.domain
397
398        #Get NetCDF
399        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
400
401        # Get the variables
402        x = fid.variables['x']
403        y = fid.variables['y']
404        z = fid.variables['elevation']
405
406        volumes = fid.variables['volumes']
407
408        # Get X, Y and bed elevation Z
409        Q = domain.quantities['elevation']
410        X,Y,Z,V = Q.get_vertex_values(xy=True,
411                                      precision=self.precision)
412
413        #
414        points = concatenate( (X[:,NewAxis],Y[:,NewAxis]), axis=1 )
415        self.writer.store_triangulation(fid,
416                                        points,
417                                        V.astype(volumes.typecode()),
418                                        Z,
419                                        points_georeference= \
420                                        domain.geo_reference)
421
422        # Close
423        fid.close()
424
425
426    def store_timestep(self, names):
427        """Store time and named quantities to file
428        """
429        from Scientific.IO.NetCDF import NetCDFFile
430        import types
431        from time import sleep
432        from os import stat
433
434        from Numeric import choose
435       
436        # Get NetCDF       
437        retries = 0
438        file_open = False
439        while not file_open and retries < 10:
440            try:
441                fid = NetCDFFile(self.filename, 'a') # Open existing file
442            except IOError:
443                # This could happen if someone was reading the file.
444                # In that case, wait a while and try again
445                msg = 'Warning (store_timestep): File %s could not be opened'\
446                      %self.filename
447                msg += ' - trying step %s again' %self.domain.time
448                print msg
449                retries += 1
450                sleep(1)
451            else:
452                file_open = True
453
454        if not file_open:
455            msg = 'File %s could not be opened for append' %self.filename
456            raise DataFileNotOpenError, msg
457
458
459
460        # Check to see if the file is already too big:
461        time = fid.variables['time']
462        i = len(time)+1
463        file_size = stat(self.filename)[6]
464        file_size_increase =  file_size/i
465        if file_size + file_size_increase > self.max_size*(2**self.recursion):
466            # In order to get the file name and start time correct,
467            # I change the domain.filename and domain.starttime.
468            # This is the only way to do this without changing
469            # other modules (I think).
470
471            # Write a filename addon that won't break Swollen's reader
472            # (10.sww is bad)
473            filename_ext = '_time_%s'%self.domain.time
474            filename_ext = filename_ext.replace('.', '_')
475           
476            # Remember the old filename, then give domain a
477            # name with the extension
478            old_domain_filename = self.domain.get_name()
479            if not self.recursion:
480                self.domain.set_name(old_domain_filename+filename_ext)
481
482
483            # Change the domain starttime to the current time
484            old_domain_starttime = self.domain.starttime
485            self.domain.starttime = self.domain.time
486
487            # Build a new data_structure.
488            next_data_structure=\
489                Data_format_sww(self.domain, mode=self.mode,\
490                                max_size = self.max_size,\
491                                recursion = self.recursion+1)
492            if not self.recursion:
493                print '    file_size = %s'%file_size
494                print '    saving file to %s'%next_data_structure.filename
495            #set up the new data_structure
496            self.domain.writer = next_data_structure
497
498            #FIXME - could be cleaner to use domain.store_timestep etc.
499            next_data_structure.store_connectivity()
500            next_data_structure.store_timestep(names)
501            fid.sync()
502            fid.close()
503
504            #restore the old starttime and filename
505            self.domain.starttime = old_domain_starttime
506            self.domain.set_name(old_domain_filename)           
507        else:
508            self.recursion = False
509            domain = self.domain
510
511            # Get the variables
512            time = fid.variables['time']
513            stage = fid.variables['stage']
514            xmomentum = fid.variables['xmomentum']
515            ymomentum = fid.variables['ymomentum']
516            i = len(time)
517            if type(names) not in [types.ListType, types.TupleType]:
518                names = [names]
519
520            if 'stage' in names and 'xmomentum' in names and \
521               'ymomentum' in names:
522
523                # Get stage
524                Q = domain.quantities['stage']
525                A,_ = Q.get_vertex_values(xy = False,
526                                          precision = self.precision)               
527                z = fid.variables['elevation']
528                stage = choose(A-z[:] >= self.minimum_storable_height,
529                           (z[:], A))
530               
531                # Get xmomentum
532                Q = domain.quantities['xmomentum']
533                xmomentum, _ = Q.get_vertex_values(xy = False,
534                                          precision = self.precision)
535               
536                # Get ymomentum
537                Q = domain.quantities['ymomentum']
538                ymomentum, _ = Q.get_vertex_values(xy = False,
539                                          precision = self.precision)
540               
541                # Write quantities to NetCDF
542                self.writer.store_quantities(fid, 
543                                             time=self.domain.time,
544                                             precision=self.precision,
545                                             stage=stage,
546                                             xmomentum=xmomentum,
547                                             ymomentum=ymomentum)
548            else:
549                # This is producing a sww that is not standard.
550                # Store time
551                time[i] = self.domain.time
552               
553                for name in names:
554                    # Get quantity
555                    Q = domain.quantities[name]
556                    A,V = Q.get_vertex_values(xy = False,
557                                              precision = self.precision)
558
559                    # FIXME: Make this general (see below)
560                    if name == 'stage':
561                        z = fid.variables['elevation']
562                        A = choose(A-z[:] >= self.minimum_storable_height,
563                                   (z[:], A))
564                        stage[i,:] = A.astype(self.precision)
565                    elif name == 'xmomentum':
566                        xmomentum[i,:] = A.astype(self.precision)
567                    elif name == 'ymomentum':
568                        ymomentum[i,:] = A.astype(self.precision)
569
570                   #As in....
571                   #eval( name + '[i,:] = A.astype(self.precision)' )
572                   #FIXME (Ole): But we need a UNIT test for that before
573                   # refactoring
574
575
576
577            # Update extrema if requested
578            domain = self.domain
579            if domain.quantities_to_be_monitored is not None:
580                for q, info in domain.quantities_to_be_monitored.items():
581
582                    if info['min'] is not None:
583                        fid.variables[q + '.extrema'][0] = info['min']
584                        fid.variables[q + '.min_location'][:] =\
585                                        info['min_location']
586                        fid.variables[q + '.min_time'][0] = info['min_time']
587                       
588                    if info['max'] is not None:
589                        fid.variables[q + '.extrema'][1] = info['max']
590                        fid.variables[q + '.max_location'][:] =\
591                                        info['max_location']
592                        fid.variables[q + '.max_time'][0] = info['max_time']
593
594           
595
596            #Flush and close
597            fid.sync()
598            fid.close()
599
600
601
602# Class for handling checkpoints data
603class Data_format_cpt(Data_format):
604    """Interface to native NetCDF format (.cpt)
605    """
606
607
608    def __init__(self, domain, mode = 'w'):
609        from Scientific.IO.NetCDF import NetCDFFile
610        from Numeric import Int, Float
611
612        self.precision = Float #Use full precision
613
614        Data_format.__init__(self, domain, 'cpt', mode)  # Checkpoint files take the .cpt extension
615
616
617        # NetCDF file definition
618        fid = NetCDFFile(self.filename, mode)
619
620        if mode == 'w':
621            #Create new file
622            fid.institution = 'Geoscience Australia'
623            fid.description = 'Checkpoint data'
624            #fid.smooth = domain.smooth
625            fid.order = domain.default_order
626
627            # dimension definitions
628            fid.createDimension('number_of_volumes', self.number_of_volumes)
629            fid.createDimension('number_of_vertices', 3)
630
631            #Store info at all vertices (no smoothing)
632            fid.createDimension('number_of_points', 3*self.number_of_volumes)
633            fid.createDimension('number_of_timesteps', None) #extensible
634
635            # variable definitions
636
637            #Mesh
638            fid.createVariable('x', self.precision, ('number_of_points',))
639            fid.createVariable('y', self.precision, ('number_of_points',))
640
641
642            fid.createVariable('volumes', Int, ('number_of_volumes',
643                                                'number_of_vertices'))
644
645            fid.createVariable('time', self.precision,
646                               ('number_of_timesteps',))
647
648            #Allocate space for all quantities
649            for name in domain.quantities.keys():
650                fid.createVariable(name, self.precision,
651                                   ('number_of_timesteps',
652                                    'number_of_points'))
653
654        #Close
655        fid.close()
656
657
658    def store_checkpoint(self):
659        """
660        Write x,y coordinates of the triangle vertices and the
661        connectivity (volumes) of the triangles
662        constituting the mesh, together with
663        the bed elevation.
664        """
665
666        from Scientific.IO.NetCDF import NetCDFFile
667
668        from Numeric import concatenate
669
670        domain = self.domain
671
672        #Get NetCDF
673        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
674
675        # Get the variables
676        x = fid.variables['x']
677        y = fid.variables['y']
678        z = fid.variables['elevation']  # Needed by z[:] below; assumes an 'elevation' variable exists
679        volumes = fid.variables['volumes']
680
681        # Get X, Y and bed elevation Z
682        Q = domain.quantities['elevation']
683        X,Y,Z,V = Q.get_vertex_values(xy=True,
684                      precision = self.precision)
685
686
687
688        x[:] = X.astype(self.precision)
689        y[:] = Y.astype(self.precision)
690        z[:] = Z.astype(self.precision)
691
692        volumes[:] = V
693
694        #Close
695        fid.close()
696
697
698    def store_timestep(self, name):
699        """Store time and named quantity to file
700        """
701        from Scientific.IO.NetCDF import NetCDFFile
702        from time import sleep
703
704        #Get NetCDF
705        retries = 0
706        file_open = False
707        while not file_open and retries < 10:
708            try:
709                fid = NetCDFFile(self.filename, 'a') #Open existing file
710            except IOError:
711                #This could happen if someone was reading the file.
712                #In that case, wait a while and try again
713                msg = 'Warning (store_timestep): File %s could not be opened'\
714                  %self.filename
715                msg += ' - trying again'
716                print msg
717                retries += 1
718                sleep(1)
719            else:
720                file_open = True
721
722        if not file_open:
723            msg = 'File %s could not be opened for append' %self.filename
724            raise DataFileNotOpenError, msg
725
726
727        domain = self.domain
728
729        # Get the variables
730        time = fid.variables['time']
731        stage = fid.variables['stage']
732        i = len(time)
733
734        #Store stage
735        time[i] = self.domain.time
736
737        # Get quantity
738        Q = domain.quantities[name]
739        A,V = Q.get_vertex_values(xy=False,
740                                  precision = self.precision)
741
742        stage[i,:] = A.astype(self.precision)
743
744        #Flush and close
745        fid.sync()
746        fid.close()
747
748
749#### NED is national exposure database (name changed to NEXIS)
750   
751LAT_TITLE = 'LATITUDE'
752LONG_TITLE = 'LONGITUDE'
753X_TITLE = 'x'
754Y_TITLE = 'y'
755class Exposure_csv:
756    def __init__(self,file_name, latitude_title=LAT_TITLE,
757                 longitude_title=LONG_TITLE, is_x_y_locations=None,
758                 x_title=X_TITLE, y_title=Y_TITLE,
759                 refine_polygon=None, title_check_list=None):
760        """
761        This class is for handling the exposure csv file.
762        It reads the file in and converts the lats and longs to a geospatial
763        data object.
764        Use the methods to read and write columns.
765
766        The format of the csv files it reads is:
767           The first row is a title row.
768           Commas are the delimiters.
769           Each column is a 'set' of data.
770
771        Feel free to use/expand it to read other csv files.
772           
773           
774        It is not for adding and deleting rows
775       
776        Can geospatial handle string attributes? It's not made for them.
777        Currently it can't load and save string attributes.
778
779        So just use geospatial to hold the x, y and georef? Bad, since
780        different attributes are in different structures.  Not so bad; the info
781        to write if the .csv file is saved is in attribute_dic.
782
783        The location info is in the geospatial attribute.
784       
785       
786        """
787        self._file_name = file_name
788        self._geospatial = None #
789
790        # self._attribute_dic is a dictionary.
791        #The keys are the column titles.
792        #The values are lists of column data
793       
794        # self._title_index_dic is a dictionary.
795        #The keys are the column titles.
796        #The values are the index positions of file columns.
797        self._attribute_dic, self._title_index_dic = \
798            csv2dict(self._file_name, title_check_list=title_check_list)
799        try:
800            #Have code here that handles caps or lower
801            lats = self._attribute_dic[latitude_title]
802            longs = self._attribute_dic[longitude_title]
803           
804        except KeyError:
805            # maybe a warning..
806            #Let's see if this works..
807            if is_x_y_locations is not False:
808                is_x_y_locations = True
809            pass
810        else:
811            self._geospatial = Geospatial_data(latitudes = lats,
812                 longitudes = longs)
813
814        if is_x_y_locations is True:
815            if self._geospatial is not None:
816                pass  # FIXME: raise an error if location info is duplicated
817            try:
818                xs = self._attribute_dic[x_title]
819                ys = self._attribute_dic[y_title]
820                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
821            except KeyError:
822                # maybe a warning..
823                msg = "Could not find location information."
824                raise TitleValueError, msg
825            else:
826                self._geospatial = Geospatial_data(data_points=points)
827           
828        # create a list of points that are in the refining_polygon
829        # described by a list of indexes representing the points
830
831    def __cmp__(self, other):
832        #print "self._attribute_dic",self._attribute_dic
833        #print "other._attribute_dic",other._attribute_dic
834        #print "self._title_index_dic", self._title_index_dic
835        #print "other._title_index_dic", other._title_index_dic
836       
837        #check that a is an instance of this class
838        if isinstance(self, type(other)):
839            result = cmp(self._attribute_dic, other._attribute_dic)
840            if result != 0:
841                return result
842            # The order of the columns is important. Therefore..
843            result = cmp(self._title_index_dic, other._title_index_dic)
844            if result != 0:
845                return result
846            for self_ls, other_ls in map(None,self._attribute_dic, \
847                    other._attribute_dic):
848                result = cmp(self._attribute_dic[self_ls],
849                             other._attribute_dic[other_ls])
850                if result != 0:
851                    return result
852            return 0
853        else:
854            return 1
855   
856
857    def get_column(self, column_name, use_refind_polygon=False):
858        """
859        Given a column name return a list of the column values
860
861        Note, the type of the values will be String!
862        do this to change a list of strings to a list of floats
863        time = [float(x) for x in time]
864       
865        Not implemented:
866        if use_refind_polygon is True, only return values in the
867        refined polygon
868        """
869        if not self._attribute_dic.has_key(column_name):
870            msg = 'There is no column called %s!' %column_name
871            raise TitleValueError, msg
872        return self._attribute_dic[column_name]
873
874
875    def get_value(self, value_column_name,
876                  known_column_name,
877                  known_values,
878                  use_refind_polygon=False):
879        """
880        Do linear interpolation on the known_column_name column, using
881        known_values, to return a value of the value_column_name column.
882        """
883        pass
884
885
886    def get_location(self, use_refind_polygon=False):
887        """
888        Return a geospatial object which describes the
889        locations of the location file.
890
891        Note, if there is no location info, this returns None.
892       
893        Not implemented:
894        if use_refind_polygon is True, only return values in the
895        refined polygon
896        """
897        return self._geospatial
898
899    def set_column(self, column_name, column_values, overwrite=False):
900        """
901        Add a column to the 'end' (with the rightmost column being the end)
902        of the csv file.
903
904        Set overwrite to True if you want to overwrite a column.
905       
906        Note, in column_name white space is removed and case is not checked.
907        Precondition
908        The column_name and column_values cannot contain commas.
909        """
910       
911        value_row_count = \
912            len(self._attribute_dic[self._title_index_dic.keys()[0]])
913        if len(column_values) != value_row_count:
914            msg = 'The number of column values must equal the number of rows.'
915            raise DataMissingValuesError, msg
916       
917        if self._attribute_dic.has_key(column_name):
918            if not overwrite:
919                msg = 'Column name %s already in use!' %column_name
920                raise TitleValueError, msg
921        else:
922            # New title.  Add it to the title index.
923            self._title_index_dic[column_name] = len(self._title_index_dic)
924        self._attribute_dic[column_name] = column_values
925        #print "self._title_index_dic[column_name]",self._title_index_dic
926
927    def save(self, file_name=None):
928        """
929        Save the exposure csv file
930        """
931        if file_name is None:
932            file_name = self._file_name
933       
934        fd = open(file_name,'wb')
935        writer = csv.writer(fd)
936       
937        # Write the titles to the csv file
938        line = [None]* len(self._title_index_dic)
939        for title in self._title_index_dic.iterkeys():
940            line[self._title_index_dic[title]]= title
941        writer.writerow(line)
942       
943        # Write the values to the csv file
944        value_row_count = \
945            len(self._attribute_dic[self._title_index_dic.keys()[0]])
946        for row_i in range(value_row_count):
947            line = [None]* len(self._title_index_dic)
948            for title in self._title_index_dic.iterkeys():
949                line[self._title_index_dic[title]]= \
950                     self._attribute_dic[title][row_i]
951            writer.writerow(line)
952
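# A hedged usage sketch for Exposure_csv (file names and column titles
# are made up; get_column returns strings, as its docstring notes):
def _example_exposure_csv():
    exposure = Exposure_csv('exposure.csv')
    depths = [float(x) for x in exposure.get_column('DEPTH')]
    exposure.set_column('DEPTH_X2', ['%f' %(2*d) for d in depths])
    exposure.save(file_name='exposure_out.csv')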
953
954def csv2dict(file_name, title_check_list=None):
955    """
956    Load the csv into a dict, with title as key and column info as value.
957    Also, create a dic, title as key and column index as value,
958    to keep track of the column order.
959
960    Two dictionaries are returned.
961   
962    WARNING: Values are returned as strings.
963    Do this to change a list of strings to a list of floats:
964        time = [float(x) for x in time]
965
966       
967    """
968   
969    #
970    attribute_dic = {}
971    title_index_dic = {}
972    titles_stripped = [] # list of titles
973    reader = csv.reader(file(file_name))
974
975    # Read in and manipulate the title info
976    titles = reader.next()
977    for i,title in enumerate(titles):
978        titles_stripped.append(title.strip())
979        title_index_dic[title.strip()] = i
980    title_count = len(titles_stripped)       
981    #print "title_index_dic",title_index_dic
982    if title_check_list is not None:
983        for title_check in title_check_list:
984            #msg = "Reading error.  This row is not present ", title_check
985            #assert title_index_dic.has_key(title_check), msg
986            if not title_index_dic.has_key(title_check):
987                #reader.close()
988                msg = "Reading error. This row is not present: %s"\
989                      %title_check
990                raise IOError, msg
991               
992   
993   
994    #create a dic of colum values, indexed by column title
995    for line in reader:
996        if len(line) != title_count:
997            raise IOError, 'Expected %d values in row, got %d' %(title_count, len(line))
998        for i, value in enumerate(line):
999            attribute_dic.setdefault(titles_stripped[i],[]).append(value)
1000       
1001    return attribute_dic, title_index_dic
1002
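# A hedged sketch for csv2dict (file name and column title made up):
def _example_csv2dict():
    attributes, title_index = csv2dict('gauges.csv')
    # Values come back as strings; convert explicitly:
    times = [float(x) for x in attributes['TIME']]
    return times, title_index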
1003
1004#Auxiliary
1005def write_obj(filename,x,y,z):
1006    """Store x,y,z vectors into filename (obj format)
1007       Vectors are assumed to have dimension (M,3) where
1008       M corresponds to the number of elements.
1009       Triangles are assumed to be disconnected.
1010
1011       The three numbers in each vector correspond to three vertices,
1012
1013       e.g. the x coordinate of vertex 1 of element i is in x[i,1]
1014
1015    """
1016    #print 'Writing obj to %s' % filename
1017
1018    import os.path
1019
1020    root, ext = os.path.splitext(filename)
1021    if ext == '.obj':
1022        FN = filename
1023    else:
1024        FN = filename + '.obj'
1025
1026
1027    outfile = open(FN, 'wb')
1028    outfile.write("# Triangulation as an obj file\n")
1029
1030    M, N = x.shape
1031    assert N==3  #Assuming three vertices per element
1032
1033    for i in range(M):
1034        for j in range(N):
1035            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))
1036
1037    for i in range(M):
1038        base = i*N
1039        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))
1040
1041    outfile.close()
1042
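# A hedged sketch for write_obj: store a single disconnected triangle
# (coordinates made up). Each of x, y, z has shape (M,3), one row per
# element:
def _example_write_obj():
    from Numeric import array
    x = array([[0.0, 1.0, 0.0]])
    y = array([[0.0, 0.0, 1.0]])
    z = array([[0.0, 0.0, 0.0]])
    write_obj('single_triangle', x, y, z)   # Creates single_triangle.obj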
1043
1044#########################################################
1045#Conversion routines
1046########################################################
1047
1048def sww2obj(basefilename, size):
1049    """Convert netcdf based data output to obj
1050    """
1051    from Scientific.IO.NetCDF import NetCDFFile
1052
1053    from Numeric import Float, zeros
1054
1055    #Get NetCDF
1056    FN = create_filename('.', basefilename, 'sww', size)
1057    print 'Reading from ', FN
1058    fid = NetCDFFile(FN, 'r')  #Open existing file for read
1059
1060
1061    # Get the variables
1062    x = fid.variables['x']
1063    y = fid.variables['y']
1064    z = fid.variables['elevation']
1065    time = fid.variables['time']
1066    stage = fid.variables['stage']
1067
1068    M = size  #Number of lines
1069    xx = zeros((M,3), Float)
1070    yy = zeros((M,3), Float)
1071    zz = zeros((M,3), Float)
1072
1073    for i in range(M):
1074        for j in range(3):
1075            xx[i,j] = x[i+j*M]
1076            yy[i,j] = y[i+j*M]
1077            zz[i,j] = z[i+j*M]
1078
1079    #Write obj for bathymetry
1080    FN = create_filename('.', basefilename, 'obj', size)
1081    write_obj(FN,xx,yy,zz)
1082
1083
1084    #Now read all the data with variable information, combine with
1085    #x,y info and store as obj
1086
1087    for k in range(len(time)):
1088        t = time[k]
1089        print 'Processing timestep %f' %t
1090
1091        for i in range(M):
1092            for j in range(3):
1093                zz[i,j] = stage[k,i+j*M]
1094
1095
1096        #Write obj for variable data
1097        #FN = create_filename(basefilename, 'obj', size, time=t)
1098        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
1099        write_obj(FN,xx,yy,zz)
1100
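# A hedged usage sketch for sww2obj (basename and size made up; size is
# the number of triangles, so the input file here is ./myrun_size1452.sww):
#
#     sww2obj('myrun', 1452)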
1101
1102def dat2obj(basefilename):
1103    """Convert line based data output to obj
1104    FIXME: Obsolete?
1105    """
1106
1107    import glob, os
1108    from anuga.config import data_dir
1109
1110
1111    #Get bathymetry and x,y's
1112    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()
1113
1114    from Numeric import zeros, Float
1115
1116    M = len(lines)  #Number of lines
1117    x = zeros((M,3), Float)
1118    y = zeros((M,3), Float)
1119    z = zeros((M,3), Float)
1120
1121    ##i = 0
1122    for i, line in enumerate(lines):
1123        tokens = line.split()
1124        values = map(float,tokens)
1125
1126        for j in range(3):
1127            x[i,j] = values[j*3]
1128            y[i,j] = values[j*3+1]
1129            z[i,j] = values[j*3+2]
1130
1131        ##i += 1
1132
1133
1134    #Write obj for bathymetry
1135    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)
1136
1137
1138    #Now read all the data files with variable information, combine with
1139    #x,y info
1140    #and store as obj
1141
1142    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')
1143
1144    for filename in files:
1145        print 'Processing %s' % filename
1146
1147        lines = open(filename, 'r').readlines()  # glob already returns full paths
1148        assert len(lines) == M
1149        root, ext = os.path.splitext(filename)
1150
1151        #Get time from filename
1152        i0 = filename.find('_time=')
1153        if i0 == -1:
1154            #Skip bathymetry file
1155            continue
1156
1157        i0 += 6  #Position where time starts
1158        i1 = filename.find('.dat')
1159
1160        if i1 > i0:
1161            t = float(filename[i0:i1])
1162        else:
1163            raise DataTimeError, 'Could not extract time from filename %s' %filename
1164
1165
1166
1167        ##i = 0
1168        for i, line in enumerate(lines):
1169            tokens = line.split()
1170            values = map(float,tokens)
1171
1172            for j in range(3):
1173                z[i,j] = values[j]
1174
1175            ##i += 1
1176
1177        #Write obj for variable data
1178        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
1179
1180
1181def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
1182    """Read netcdf filename1, pick timesteps first:step:last and save to
1183    netcdf file filename2
1184    """
1185    from Scientific.IO.NetCDF import NetCDFFile
1186
1187    #Get NetCDF
1188    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
1189    outfile = NetCDFFile(filename2, 'w')  #Open new file
1190
1191
1192    #Copy dimensions
1193    for d in infile.dimensions:
1194        outfile.createDimension(d, infile.dimensions[d])
1195
1196    for name in infile.variables:
1197        var = infile.variables[name]
1198        outfile.createVariable(name, var.typecode(), var.dimensions)
1199
1200
1201    #Copy the static variables
1202    for name in infile.variables:
1203        if name == 'time' or name == 'stage':
1204            pass
1205        else:
1206            #Copy
1207            outfile.variables[name][:] = infile.variables[name][:]
1208
1209    #Copy selected timesteps
1210    time = infile.variables['time']
1211    stage = infile.variables['stage']
1212
1213    newtime = outfile.variables['time']
1214    newstage = outfile.variables['stage']
1215
1216    if last is None:
1217        last = len(time)
1218
1219    selection = range(first, last, step)
1220    for i, j in enumerate(selection):
1221        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
1222        newtime[i] = time[j]
1223        newstage[i,:] = stage[j,:]
1224
1225    #Close
1226    infile.close()
1227    outfile.close()
1228
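# A hedged usage sketch for filter_netcdf (file names made up): keep
# every second timestep:
#
#     filter_netcdf('myrun.sww', 'myrun_coarse.sww', first=0, step=2)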
1229
1230#Get data objects
1231def get_dataobject(domain, mode='w'):
1232    """Return instance of class of given format using filename
1233    """
1234
1235    cls = eval('Data_format_%s' %domain.format)
1236    return cls(domain, mode)
1237
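# A hedged sketch of how the returned writer is typically driven;
# 'domain' is assumed to be an initialised ANUGA domain with
# domain.format == 'sww':
def _example_store_sww(domain):
    writer = get_dataobject(domain, mode='w')
    writer.store_connectivity()
    writer.store_timestep(['stage', 'xmomentum', 'ymomentum'])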
1238
1239
1240
1241def dem2pts(basename_in, basename_out=None,
1242            easting_min=None, easting_max=None,
1243            northing_min=None, northing_max=None,
1244            use_cache=False, verbose=False,):
1245    """Read Digitial Elevation model from the following NetCDF format (.dem)
1246
1247    Example:
1248
1249    ncols         3121
1250    nrows         1800
1251    xllcorner     722000
1252    yllcorner     5893000
1253    cellsize      25
1254    NODATA_value  -9999
1255    138.3698 137.4194 136.5062 135.5558 ..........
1256
1257    Convert to NetCDF pts format which is
1258
1259    points:  (Nx2) Float array
1260    elevation: N Float array
1261    """
1262
1263
1264
1265    kwargs = {'basename_out': basename_out,
1266              'easting_min': easting_min,
1267              'easting_max': easting_max,
1268              'northing_min': northing_min,
1269              'northing_max': northing_max,
1270              'verbose': verbose}
1271
1272    if use_cache is True:
1273        from caching import cache
1274        result = cache(_dem2pts, basename_in, kwargs,
1275                       dependencies = [basename_in + '.dem'],
1276                       verbose = verbose)
1277
1278    else:
1279        result = apply(_dem2pts, [basename_in], kwargs)
1280
1281    return result
1282
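# A hedged usage sketch for dem2pts, clipping to a bounding box while
# converting (basename and bounds made up, echoing the docstring example):
#
#     dem2pts('topo', basename_out='topo_clipped',
#             easting_min=722000, easting_max=730000,
#             northing_min=5893000, northing_max=5900000,
#             use_cache=True, verbose=True)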
1283
1284def _dem2pts(basename_in, basename_out=None, verbose=False,
1285            easting_min=None, easting_max=None,
1286            northing_min=None, northing_max=None):
1287    """Read Digitial Elevation model from the following NetCDF format (.dem)
1288
1289    Internal function. See public function dem2pts for details.
1290    """
1291
1292    # FIXME: Can this be written feasibly using write_pts?
1293
1294    import os
1295    from Scientific.IO.NetCDF import NetCDFFile
1296    from Numeric import Float, zeros, reshape, sum
1297
1298    root = basename_in
1299
1300    # Get NetCDF
1301    infile = NetCDFFile(root + '.dem', 'r')  # Open existing netcdf file for read
1302
1303    if verbose: print 'Reading DEM from %s' %(root + '.dem')
1304
1305    ncols = infile.ncols[0]
1306    nrows = infile.nrows[0]
1307    xllcorner = infile.xllcorner[0]  # Easting of lower left corner
1308    yllcorner = infile.yllcorner[0]  # Northing of lower left corner
1309    cellsize = infile.cellsize[0]
1310    NODATA_value = infile.NODATA_value[0]
1311    dem_elevation = infile.variables['elevation']
1312
1313    zone = infile.zone[0]
1314    false_easting = infile.false_easting[0]
1315    false_northing = infile.false_northing[0]
1316
1317    # Text strings
1318    projection = infile.projection
1319    datum = infile.datum
1320    units = infile.units
1321
1322
1323    # Get output file
1324    if basename_out is None:
1325        ptsname = root + '.pts'
1326    else:
1327        ptsname = basename_out + '.pts'
1328
1329    if verbose: print 'Store to NetCDF file %s' %ptsname
1330    # NetCDF file definition
1331    outfile = NetCDFFile(ptsname, 'w')
1332
1333    # Create new file
1334    outfile.institution = 'Geoscience Australia'
1335    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
1336                      'of spatial point data'
1337    # Assign default values
1338    if easting_min is None: easting_min = xllcorner
1339    if easting_max is None: easting_max = xllcorner + ncols*cellsize
1340    if northing_min is None: northing_min = yllcorner
1341    if northing_max is None: northing_max = yllcorner + nrows*cellsize
1342
1343    # Compute offsets to update georeferencing
1344    easting_offset = xllcorner - easting_min
1345    northing_offset = yllcorner - northing_min
1346
1347    # Georeferencing
1348    outfile.zone = zone
1349    outfile.xllcorner = easting_min # Easting of lower left corner
1350    outfile.yllcorner = northing_min # Northing of lower left corner
1351    outfile.false_easting = false_easting
1352    outfile.false_northing = false_northing
1353
1354    outfile.projection = projection
1355    outfile.datum = datum
1356    outfile.units = units
1357
1358
1359    # Grid info (FIXME: probably not going to be used, but heck)
1360    outfile.ncols = ncols
1361    outfile.nrows = nrows
1362
1363    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
1364    totalnopoints = nrows*ncols
1365
1366    # Calculating number of NODATA_values for each row in clipped region
1367    # FIXME: use array operations to do faster
1368    nn = 0
1369    k = 0
1370    i1_0 = 0
1371    j1_0 = 0
1372    thisj = 0
1373    thisi = 0
1374    for i in range(nrows):
1375        y = (nrows-i-1)*cellsize + yllcorner
1376        for j in range(ncols):
1377            x = j*cellsize + xllcorner
1378            if easting_min <= x <= easting_max and \
1379               northing_min <= y <= northing_max:
1380                thisj = j
1381                thisi = i
1382                if dem_elevation_r[i,j] == NODATA_value: nn += 1
1383
1384                if k == 0:
1385                    i1_0 = i
1386                    j1_0 = j
1387                k += 1
1388
1389    index1 = j1_0
1390    index2 = thisj
1391
1392    # Dimension definitions
1393    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
1394    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
1395
1396    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
1397    nopoints = clippednopoints-nn
1398
1399    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]
1400
1401    if verbose:
1402        print 'There are %d values in the elevation' %totalnopoints
1403        print 'There are %d values in the clipped elevation' %clippednopoints
1404        print 'There are %d NODATA_values in the clipped elevation' %nn
1405
1406    outfile.createDimension('number_of_points', nopoints)
1407    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1408
1409    # Variable definitions
1410    outfile.createVariable('points', Float, ('number_of_points',
1411                                             'number_of_dimensions'))
1412    outfile.createVariable('elevation', Float, ('number_of_points',))
1413
1414    # Get handles to the variables
1415    points = outfile.variables['points']
1416    elevation = outfile.variables['elevation']
1417
1418    lenv = index2-index1+1
1419    # Store data
1420    global_index = 0
1421    # for i in range(nrows):
1422    for i in range(i1_0,thisi+1,1):
1423        if verbose and i%((nrows+10)/10)==0:
1424            print 'Processing row %d of %d' %(i, nrows)
1425
1426        lower_index = global_index
1427
1428        v = dem_elevation_r[i,index1:index2+1]
1429        no_NODATA = sum(v == NODATA_value)
1430        if no_NODATA > 0:
1431            newcols = lenv - no_NODATA # ncols_in_bounding_box - no_NODATA
1432        else:
1433            newcols = lenv # ncols_in_bounding_box
1434
1435        telev = zeros(newcols, Float)
1436        tpoints = zeros((newcols, 2), Float)
1437
1438        local_index = 0
1439
1440        y = (nrows-i-1)*cellsize + yllcorner
1441        #for j in range(ncols):
1442        for j in range(j1_0,index2+1,1):
1443
1444            x = j*cellsize + xllcorner
1445            if easting_min <= x <= easting_max and \
1446               northing_min <= y <= northing_max and \
1447               dem_elevation_r[i,j] != NODATA_value:
1448                tpoints[local_index, :] = [x-easting_min,y-northing_min]
1449                telev[local_index] = dem_elevation_r[i, j]
1450                global_index += 1
1451                local_index += 1
1452
1453        upper_index = global_index
1454
1455        if upper_index == lower_index + newcols:
1456            points[lower_index:upper_index, :] = tpoints
1457            elevation[lower_index:upper_index] = telev
1458
1459    assert global_index == nopoints, 'index not equal to number of points'
1460
1461    infile.close()
1462    outfile.close()
1463
1464
1465
1466def _read_hecras_cross_sections(lines):
1467    """Return block of surface lines for each cross section
1468    Starts with SURFACE LINE,
1469    Ends with END CROSS-SECTION
1470    """
1471
1472    points = []
1473
1474    reading_surface = False
1475    for i, line in enumerate(lines):
1476
1477        if len(line.strip()) == 0:    #Ignore blanks
1478            continue
1479
1480        if lines[i].strip().startswith('SURFACE LINE'):
1481            reading_surface = True
1482            continue
1483
1484        if lines[i].strip().startswith('END') and reading_surface:
1485            yield points
1486            reading_surface = False
1487            points = []
1488
1489        if reading_surface:
1490            fields = line.strip().split(',')
1491            easting = float(fields[0])
1492            northing = float(fields[1])
1493            elevation = float(fields[2])
1494            points.append([easting, northing, elevation])
1495
1496
1497
1498
1499def hecras_cross_sections2pts(basename_in,
1500                              basename_out=None,
1501                              verbose=False):
1502    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1503
1504    Example:
1505
1506
1507# RAS export file created on Mon 15Aug2005 11:42
1508# by HEC-RAS Version 3.1.1
1509
1510BEGIN HEADER:
1511  UNITS: METRIC
1512  DTM TYPE: TIN
1513  DTM: v:\1\cit\perth_topo\river_tin
1514  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1515  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1516  MAP PROJECTION: UTM
1517  PROJECTION ZONE: 50
1518  DATUM: AGD66
1519  VERTICAL DATUM:
1520  NUMBER OF REACHES:  19
1521  NUMBER OF CROSS-SECTIONS:  14206
1522END HEADER:
1523
1524
1525Only the SURFACE LINE data of the following form will be utilised
1526
1527  CROSS-SECTION:
1528    STREAM ID:Southern-Wungong
1529    REACH ID:Southern-Wungong
1530    STATION:19040.*
1531    CUT LINE:
1532      405548.671603161 , 6438142.7594925
1533      405734.536092045 , 6438326.10404912
1534      405745.130459356 , 6438331.48627354
1535      405813.89633823 , 6438368.6272789
1536    SURFACE LINE:
1537     405548.67,   6438142.76,   35.37
1538     405552.24,   6438146.28,   35.41
1539     405554.78,   6438148.78,   35.44
1540     405555.80,   6438149.79,   35.44
1541     405559.37,   6438153.31,   35.45
1542     405560.88,   6438154.81,   35.44
1543     405562.93,   6438156.83,   35.42
1544     405566.50,   6438160.35,   35.38
1545     405566.99,   6438160.83,   35.37
1546     ...
1547   END CROSS-SECTION
1548
1549    Convert to NetCDF pts format which is
1550
1551    points:  (Nx2) Float array
1552    elevation: N Float array
1553    """
1554
1555    import os
1556    from Scientific.IO.NetCDF import NetCDFFile
1557    from Numeric import Float, zeros, reshape
1558
1559    root = basename_in
1560
1561    #Get ASCII file
1562    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1563
1564    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1565
1566    lines = infile.readlines()
1567    infile.close()
1568
1569    if verbose: print 'Converting to pts format'
1570
1571    i = 0
1572    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1573        i += 1
1574
1575    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1576    i += 1
1577
1578    assert lines[i].strip().upper().startswith('UNITS:')
1579    units = lines[i].strip().split()[1]
1580    i += 1
1581
1582    assert lines[i].strip().upper().startswith('DTM TYPE:')
1583    i += 1
1584
1585    assert lines[i].strip().upper().startswith('DTM:')
1586    i += 1
1587
1588    assert lines[i].strip().upper().startswith('STREAM')
1589    i += 1
1590
1591    assert lines[i].strip().upper().startswith('CROSS')
1592    i += 1
1593
1594    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1595    projection = lines[i].strip().split(':')[1]
1596    i += 1
1597
1598    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1599    zone = int(lines[i].strip().split(':')[1])
1600    i += 1
1601
1602    assert lines[i].strip().upper().startswith('DATUM:')
1603    datum = lines[i].strip().split(':')[1]
1604    i += 1
1605
1606    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1607    i += 1
1608
1609    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1610    i += 1
1611
1612    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1613    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1614    i += 1
1615
1616
1617    #Now read all points
1618    points = []
1619    elevation = []
1620    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1621        for k, entry in enumerate(entries):
1622            points.append(entry[:2])
1623            elevation.append(entry[2])
1624
1625
1626    msg = 'Actual number_of_cross_sections == %d, reported as %d'\
1627          %(j+1, number_of_cross_sections)
1628    assert j+1 == number_of_cross_sections, msg
1629
1630    #Get output file
1631    if basename_out is None:
1632        ptsname = root + '.pts'
1633    else:
1634        ptsname = basename_out + '.pts'
1635
1636    geo_ref = Geo_reference(zone, 0, 0, datum, projection, units)
1637    geo = Geospatial_data(points, {"elevation":elevation},
1638                          verbose=verbose, geo_reference=geo_ref)
1639    geo.export_points_file(ptsname)
1640
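# Illustrative sketch (not part of the original module): the .pts file
# written above can be inspected with Geospatial_data, which is already
# imported at the top of this module. The filename is hypothetical.
#
#     G = Geospatial_data(file_name='southern_wungong.pts')
#     xy = G.get_data_points(absolute=True)   # Nx2 array of eastings/northings
#     z = G.get_attributes('elevation')       # N elevations, one per point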
1641def export_grid(basename_in, extra_name_out = None,
1642                quantities = None, # defaults to elevation
1643                timestep = None,
1644                reduction = None,
1645                cellsize = 10,
1646                NODATA_value = -9999,
1647                easting_min = None,
1648                easting_max = None,
1649                northing_min = None,
1650                northing_max = None,
1651                verbose = False,
1652                origin = None,
1653                datum = 'WGS84',
1654                format = 'ers'):
1655    """
1656   
1657    Wrapper for sww2dem. - see sww2dem to find out what most of the
1658    parameters do.
1659
1660    Quantities is a list of quantities.  Each quantity will be
1661    calculated for each sww file.
1662
1663    This returns the basenames of the files returned, which is made up
1664    of the dir and all of the file name, except the extension.
1665
1666    This function returns the names of the files produced.
1667
1668    It will also produce as many output files as there are input sww files.
1669    """
1670   
1671    if quantities is None:
1672        quantities = ['elevation']
1673       
1674    if type(quantities) is str:
1675        quantities = [quantities]
1676
1677    # How many sww files are there?
1678    dir, base = os.path.split(basename_in)
1681
1682    iterate_over = get_all_swwfiles(dir,base,verbose)
1683   
1684    if dir == "":
1685        dir = "." # Unix compatibility
1686   
1687    files_out = []
1689    for sww_file in iterate_over:
1690        for quantity in quantities:
1691            if extra_name_out is None:
1692                basename_out = sww_file + '_' + quantity
1693            else:
1694                basename_out = sww_file + '_' + quantity + '_' \
1695                               + extra_name_out
1697       
1698            file_out = sww2dem(dir+sep+sww_file, dir+sep+basename_out,
1699                               quantity, 
1700                               timestep,
1701                               reduction,
1702                               cellsize,
1703                               NODATA_value,
1704                               easting_min,
1705                               easting_max,
1706                               northing_min,
1707                               northing_max,
1708                               verbose,
1709                               origin,
1710                               datum,
1711                               format)
1712            files_out.append(file_out)
1713    #print "basenames_out after",basenames_out
1714    return files_out
1715
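# Illustrative usage sketch (assumes sww files with basename 'bedslope'
# exist in the current directory; the name is hypothetical):
#
#     files = export_grid('bedslope',
#                         quantities=['elevation', 'stage'],
#                         cellsize=25,
#                         format='asc')
#     # -> e.g. ['./bedslope_elevation', './bedslope_stage'], one output
#     #    grid per sww file per quantity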
1716
1717def get_timeseries(production_dirs, output_dir, scenario_name, gauges_dir_name,
1718                   plot_quantity, generate_fig = False,
1719                   reportname = None, surface = False, time_min = None,
1720                   time_max = None, title_on = False, verbose = True,
1721                   nodes=None):
1722    """
1723    nodes - number of processes used.
1724
1725    warning - this function has no tests
1726    """
1727    if reportname is None:
1728        report = False
1729    else:
1730        report = True
1731       
1732    if nodes is None:
1733        is_parallel = False
1734    else:
1735        is_parallel = True
1736       
1737    # Generate figures
1738    swwfiles = {}
1739   
1740    if is_parallel is True:   
1741        for i in range(nodes):
1742            print 'Sending node %d of %d' %(i,nodes)
1743            swwfiles = {}
1744            if reportname is not None:
1745                reportname = reportname + '_%s' %(i)
1746            for label_id in production_dirs.keys():
1747                if label_id == 'boundaries':
1748                    swwfile = best_boundary_sww
1749                else:
1750                    file_loc = output_dir + label_id + sep
1751                    sww_extra = '_P%s_%s' %(i,nodes)
1752                    swwfile = file_loc + scenario_name + sww_extra + '.sww'
1753                    print 'swwfile',swwfile
1754                    swwfiles[swwfile] = label_id
1755
1756            # Generate the report once per node, after all sww files
1757            # for this node have been collected
            texname, elev_output = sww2timeseries(swwfiles,
                                          gauges_dir_name,
                                          production_dirs,
                                          report = report,
                                          reportname = reportname,
                                          plot_quantity = plot_quantity,
                                          generate_fig = generate_fig,
                                          surface = surface,
                                          time_min = time_min,
                                          time_max = time_max,
                                          title_on = title_on,
                                          verbose = verbose)
1768    else:   
1769        for label_id in production_dirs.keys():       
1770            if label_id == 'boundaries':
1771                print 'boundaries'
1772                file_loc = project.boundaries_in_dir
1773                swwfile = project.boundaries_dir_name3 + '.sww'
1774                #  swwfile = boundary_dir_filename
1775            else:
1776                file_loc = output_dir + label_id + sep
1777                swwfile = file_loc + scenario_name + '.sww'
1778            swwfiles[swwfile] = label_id
1779       
1780        texname, elev_output = sww2timeseries(swwfiles,
1781                                              gauges_dir_name,
1782                                              production_dirs,
1783                                              report = report,
1784                                              reportname = reportname,
1785                                              plot_quantity = plot_quantity,
1786                                              generate_fig = generate_fig,
1787                                              surface = surface,
1788                                              time_min = time_min,
1789                                              time_max = time_max,
1790                                              title_on = title_on,
1791                                              verbose = verbose)
1792                                         
1793
1794   
1795def sww2dem(basename_in, basename_out = None,
1796            quantity = None, # defaults to elevation
1797            timestep = None,
1798            reduction = None,
1799            cellsize = 10,
1800            NODATA_value = -9999,
1801            easting_min = None,
1802            easting_max = None,
1803            northing_min = None,
1804            northing_max = None,
1805            verbose = False,
1806            origin = None,
1807            datum = 'WGS84',
1808            format = 'ers'):
1809
1810    """Read SWW file and convert to Digitial Elevation model format
1811    (.asc or .ers)
1812
1813    Example (ASC):
1814
1815    ncols         3121
1816    nrows         1800
1817    xllcorner     722000
1818    yllcorner     5893000
1819    cellsize      25
1820    NODATA_value  -9999
1821    138.3698 137.4194 136.5062 135.5558 ..........
1822
1823    For 'asc' format, also write accompanying file with same basename_out
1824    but extension .prj, used to fix the UTM zone, datum, false northings and eastings.
1825
1826    The prj format is assumed to be as
1827
1828    Projection    UTM
1829    Zone          56
1830    Datum         WGS84
1831    Zunits        NO
1832    Units         METERS
1833    Spheroid      WGS84
1834    Xshift        0.0000000000
1835    Yshift        10000000.0000000000
1836    Parameters
1837
1838
1839    The parameter quantity must be the name of an existing quantity or
1840    an expression involving existing quantities. The default is
1841    'elevation'. Quantity is not a list of quantities.
1842
1843    if timestep (an index) is given, output quantity at that timestep
1844
1845    if reduction is given use that to reduce quantity over all timesteps.
1846
1847    datum sets the geodetic datum written to the output (default 'WGS84')
1848
1849    format can be either 'asc' or 'ers'
1850    """
1851
1852    import sys
1853    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, \
1854         sometrue
1855    from Numeric import array2string
1856
1857    from anuga.utilities.polygon import inside_polygon, outside_polygon, \
1858         separate_points_by_polygon
1859    from anuga.abstract_2d_finite_volumes.util import \
1860         apply_expression_to_dictionary
1861
1862    msg = 'Format must be either asc or ers'
1863    assert format.lower() in ['asc', 'ers'], msg
1864
1865
1866    false_easting = 500000
1867    false_northing = 10000000
1868
1869    if quantity is None:
1870        quantity = 'elevation'
1871       
1872    if reduction is None:
1873        reduction = max
1874
1875    if basename_out is None:
1876        basename_out = basename_in + '_%s' %quantity
1877
1878    if quantity_formula.has_key(quantity):
1879        quantity = quantity_formula[quantity]
1880       
1881    swwfile = basename_in + '.sww'
1882    demfile = basename_out + '.' + format
1883    # Note the use of a .ers extension is optional (write_ermapper_grid will
1884    # deal with either option)
1885   
1887   
1888    # Read sww file
1889    if verbose: 
1890        print 'Reading from %s' %swwfile
1891        print 'Output basename is %s' %basename_out
1892   
1893    from Scientific.IO.NetCDF import NetCDFFile
1894    fid = NetCDFFile(swwfile)
1895
1896    #Get extent and reference
1897    x = fid.variables['x'][:]
1898    y = fid.variables['y'][:]
1899    volumes = fid.variables['volumes'][:]
1900    if timestep is not None:
1901        times = fid.variables['time'][timestep]
1902    else:
1903        times = fid.variables['time'][:]
1904
1905    number_of_timesteps = fid.dimensions['number_of_timesteps']
1906    number_of_points = fid.dimensions['number_of_points']
1907   
1908    if origin is None:
1909
1910        # Get geo_reference
1911        # sww files don't have to have a geo_ref
1912        try:
1913            geo_reference = Geo_reference(NetCDFObject=fid)
1914        except AttributeError, e:
1915            geo_reference = Geo_reference() # Default georef object
1916
1917        xllcorner = geo_reference.get_xllcorner()
1918        yllcorner = geo_reference.get_yllcorner()
1919        zone = geo_reference.get_zone()
1920    else:
1921        zone = origin[0]
1922        xllcorner = origin[1]
1923        yllcorner = origin[2]
1924
1925
1926
1927    # FIXME: Refactor using code from Interpolation_function.statistics
1928    # (in interpolate.py)
1929    # Something like print swwstats(swwname)
1930    if verbose:
1931        print '------------------------------------------------'
1932        print 'Statistics of SWW file:'
1933        print '  Name: %s' %swwfile
1934        print '  Reference:'
1935        print '    Lower left corner: [%f, %f]'\
1936              %(xllcorner, yllcorner)
1937        if timestep is not None:
1938            print '    Time: %f' %(times)
1939        else:
1940            print '    Start time: %f' %fid.starttime[0]
1941        print '  Extent:'
1942        print '    x [m] in [%f, %f], len(x) == %d'\
1943              %(min(x.flat), max(x.flat), len(x.flat))
1944        print '    y [m] in [%f, %f], len(y) == %d'\
1945              %(min(y.flat), max(y.flat), len(y.flat))
1946        if timestep is not None:
1947            print '    t [s] = %f, len(t) == %d' %(times, 1)
1948        else:
1949            print '    t [s] in [%f, %f], len(t) == %d'\
1950              %(min(times), max(times), len(times))
1951        print '  Quantities [SI units]:'
1952        for name in ['stage', 'xmomentum', 'ymomentum']:
1953            q = fid.variables[name][:].flat
1954            if timestep is not None:
1955                q = q[timestep*len(x):(timestep+1)*len(x)]
1956            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1957        for name in ['elevation']:
1958            q = fid.variables[name][:].flat
1959            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1960           
1961    # Get quantity and reduce if applicable
1962    if verbose: print 'Processing quantity %s' %quantity
1963
1964    # Turn NetCDF objects into Numeric arrays
1965    quantity_dict = {}
1966    for name in fid.variables.keys():
1967        quantity_dict[name] = fid.variables[name][:] 
1968
1969
1970    # Convert quantity expression to quantities found in sww file   
1971    q = apply_expression_to_dictionary(quantity, quantity_dict)
1972
1973    if len(q.shape) == 2:
1974        #q has a time component and needs to be reduced along
1975        #the temporal dimension
1976        if verbose: print 'Reducing quantity %s' %quantity
1977        q_reduced = zeros( number_of_points, Float )
1978       
1979        if timestep is not None:
1980            for k in range(number_of_points):
1981                q_reduced[k] = q[timestep,k] 
1982        else:
1983            for k in range(number_of_points):
1984                q_reduced[k] = reduction( q[:,k] )
1985
1986        q = q_reduced
1987
1988    #Post condition: Now q has dimension: number_of_points
1989    assert len(q.shape) == 1
1990    assert q.shape[0] == number_of_points
1991
1992
1993    if verbose:
1994        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1995
1996
1997    #Create grid and update xll/yll corner and x,y
1998
1999    #Relative extent
2000    if easting_min is None:
2001        xmin = min(x)
2002    else:
2003        xmin = easting_min - xllcorner
2004
2005    if easting_max is None:
2006        xmax = max(x)
2007    else:
2008        xmax = easting_max - xllcorner
2009
2010    if northing_min is None:
2011        ymin = min(y)
2012    else:
2013        ymin = northing_min - yllcorner
2014
2015    if northing_max is None:
2016        ymax = max(y)
2017    else:
2018        ymax = northing_max - yllcorner
2019
2020
2021
2022    if verbose: print 'Creating grid'
2023    ncols = int((xmax-xmin)/cellsize)+1
2024    nrows = int((ymax-ymin)/cellsize)+1
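    # Fence-post counting: both edges are included, so e.g. xmin=0,
    # xmax=100 and cellsize=10 give ncols == int(100/10)+1 == 11.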
2025
2026
2027    #New absolute reference and coordinates
2028    newxllcorner = xmin+xllcorner
2029    newyllcorner = ymin+yllcorner
2030
2031    x = x+xllcorner-newxllcorner
2032    y = y+yllcorner-newyllcorner
2033   
2034    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2035    assert len(vertex_points.shape) == 2
2036
2037    grid_points = zeros ( (ncols*nrows, 2), Float )
2038
2039
2040    for i in xrange(nrows):
2041        if format.lower() == 'asc':
2042            yg = i*cellsize
2043        else:
2044            # ers stores rows north to south; flip the order of the y values
2045            yg = (nrows-i)*cellsize
2046
2047        for j in xrange(ncols):
2048            xg = j*cellsize
2049            k = i*ncols + j
2050
2051            grid_points[k,0] = xg
2052            grid_points[k,1] = yg
2053
2054    #Interpolate
2055    from anuga.fit_interpolate.interpolate import Interpolate
2056
2057    # Remove loners from vertex_points, volumes here
2058    vertex_points, volumes = remove_lone_verts(vertex_points, volumes)
2061    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2062
2063    #Interpolate using quantity values
2064    if verbose: print 'Interpolating'
2065    grid_values = interp.interpolate(q, grid_points).flat
2066
2067
2068    if verbose:
2069        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2070                                                      max(grid_values))
2071
2072    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2073    P = interp.mesh.get_boundary_polygon()
2074    outside_indices = outside_polygon(grid_points, P, closed=True)
2075
2076    for i in outside_indices:
2077        grid_values[i] = NODATA_value
2078
2079
2080
2081
2082    if format.lower() == 'ers':
2083        # setup ERS header information
2084        grid_values = reshape(grid_values,(nrows, ncols))
2085        header = {}
2086        header['datum'] = '"' + datum + '"'
2087        # FIXME The use of hardwired UTM and zone number needs to be made optional
2088        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2089        header['projection'] = '"UTM-' + str(zone) + '"'
2090        header['coordinatetype'] = 'EN'
2091        if header['coordinatetype'] == 'LL':
2092            header['longitude'] = str(newxllcorner)
2093            header['latitude'] = str(newyllcorner)
2094        elif header['coordinatetype'] == 'EN':
2095            header['eastings'] = str(newxllcorner)
2096            header['northings'] = str(newyllcorner)
2097        header['nullcellvalue'] = str(NODATA_value)
2098        header['xdimension'] = str(cellsize)
2099        header['ydimension'] = str(cellsize)
2100        header['value'] = '"' + quantity + '"'
2101        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2102
2103
2104        #Write
2105        if verbose: print 'Writing %s' %demfile
2106        import ermapper_grids
2107        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2108
2109        fid.close()
        return basename_out  # ers case: return the basename, matching the asc branch
2110    else:
2111        #Write to Ascii format
2112
2113        #Write prj file
2114        prjfile = basename_out + '.prj'
2115
2116        if verbose: print 'Writing %s' %prjfile
2117        prjid = open(prjfile, 'w')
2118        prjid.write('Projection    %s\n' %'UTM')
2119        prjid.write('Zone          %d\n' %zone)
2120        prjid.write('Datum         %s\n' %datum)
2121        prjid.write('Zunits        NO\n')
2122        prjid.write('Units         METERS\n')
2123        prjid.write('Spheroid      %s\n' %datum)
2124        prjid.write('Xshift        %d\n' %false_easting)
2125        prjid.write('Yshift        %d\n' %false_northing)
2126        prjid.write('Parameters\n')
2127        prjid.close()
2128
2129
2130
2131        if verbose: print 'Writing %s' %demfile
2132
2133        ascid = open(demfile, 'w')
2134
2135        ascid.write('ncols         %d\n' %ncols)
2136        ascid.write('nrows         %d\n' %nrows)
2137        ascid.write('xllcorner     %d\n' %newxllcorner)
2138        ascid.write('yllcorner     %d\n' %newyllcorner)
2139        ascid.write('cellsize      %f\n' %cellsize)
2140        ascid.write('NODATA_value  %d\n' %NODATA_value)
2141
2142
2143        #Get bounding polygon from mesh
2144        #P = interp.mesh.get_boundary_polygon()
2145        #inside_indices = inside_polygon(grid_points, P)
2146
2147        for i in range(nrows):
2148            if verbose and i%((nrows+10)/10)==0:
2149                print 'Doing row %d of %d' %(i, nrows)
2150
2151            base_index = (nrows-i-1)*ncols
2152
2153            slice = grid_values[base_index:base_index+ncols]
2154            s = array2string(slice, max_line_width=sys.maxint)
2155            ascid.write(s[1:-1] + '\n')
2156
2157
2164
2165        #Close
2166        ascid.close()
2167        fid.close()
2168        return basename_out
2169
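# Illustrative usage sketch (hypothetical filenames):
#
#     # Grid the maximum stage over all timesteps onto a 10m ASCII grid
#     sww2dem('runup', basename_out='runup_maxstage',
#             quantity='stage', reduction=max, cellsize=10, format='asc')
#
#     # Grid the water depth at timestep 0, using a quantity expression
#     sww2dem('runup', quantity='stage-elevation', timestep=0, format='ers')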
2170#Backwards compatibility
2171def sww2asc(basename_in, basename_out = None,
2172            quantity = None,
2173            timestep = None,
2174            reduction = None,
2175            cellsize = 10,
2176            verbose = False,
2177            origin = None):
2178    print 'sww2asc will soon be obsolete - please use sww2dem'
2179    sww2dem(basename_in,
2180            basename_out = basename_out,
2181            quantity = quantity,
2182            timestep = timestep,
2183            reduction = reduction,
2184            cellsize = cellsize,
2185            verbose = verbose,
2186            origin = origin,
2187            datum = 'WGS84',
2188            format = 'asc')
2189
2190def sww2ers(basename_in, basename_out = None,
2191            quantity = None,
2192            timestep = None,
2193            reduction = None,
2194            cellsize = 10,
2195            verbose = False,
2196            origin = None,
2197            datum = 'WGS84'):
2198    print 'sww2ers will soon be obsolete - please use sww2dem'
2199    sww2dem(basename_in,
2200            basename_out = basename_out,
2201            quantity = quantity,
2202            timestep = timestep,
2203            reduction = reduction,
2204            cellsize = cellsize,
2205            verbose = verbose,
2206            origin = origin,
2207            datum = datum,
2208            format = 'ers')
2209################################# END COMPATIBILITY ##############
2210
2211
2212
2213def sww2pts(basename_in, basename_out=None,
2214            data_points=None,
2215            quantity=None,
2216            timestep=None,
2217            reduction=None,
2218            NODATA_value=-9999,
2219            verbose=False,
2220            origin=None):
2221            #datum = 'WGS84')
2222
2223
2224    """Read SWW file and convert to interpolated values at selected points
2225
2226    The parameter 'quantity' must be the name of an existing quantity or
2227    an expression involving existing quantities. The default is
2228    'elevation'.
2229
2230    if timestep (an index) is given, output quantity at that timestep
2231
2232    if reduction is given use that to reduce quantity over all timesteps.
2233
2234    data_points (Nx2 array) gives the locations of points where quantity is to be computed.
2235   
2236    """
2237
2238    import sys
2239    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2240    from Numeric import array2string
2241
2242    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2243    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
2244
2245    from anuga.geospatial_data.geospatial_data import Geospatial_data
2246
2247    if quantity is None:
2248        quantity = 'elevation'
2249
2250    if reduction is None:
2251        reduction = max
2252
2253    if basename_out is None:
2254        basename_out = basename_in + '_%s' %quantity
2255
2256    swwfile = basename_in + '.sww'
2257    ptsfile = basename_out + '.pts'
2258
2259    # Read sww file
2260    if verbose: print 'Reading from %s' %swwfile
2261    from Scientific.IO.NetCDF import NetCDFFile
2262    fid = NetCDFFile(swwfile)
2263
2264    # Get extent and reference
2265    x = fid.variables['x'][:]
2266    y = fid.variables['y'][:]
2267    volumes = fid.variables['volumes'][:]
2268
2269    number_of_timesteps = fid.dimensions['number_of_timesteps']
2270    number_of_points = fid.dimensions['number_of_points']
2271    if origin is None:
2272
2273        # Get geo_reference
2274        # sww files don't have to have a geo_ref
2275        try:
2276            geo_reference = Geo_reference(NetCDFObject=fid)
2277        except AttributeError, e:
2278            geo_reference = Geo_reference() #Default georef object
2279
2280        xllcorner = geo_reference.get_xllcorner()
2281        yllcorner = geo_reference.get_yllcorner()
2282        zone = geo_reference.get_zone()
2283    else:
2284        zone = origin[0]
2285        xllcorner = origin[1]
2286        yllcorner = origin[2]
2287
2288
2289
2290    # FIXME: Refactor using code from file_function.statistics
2291    # Something like print swwstats(swwname)
2292    if verbose:
2293        x = fid.variables['x'][:]
2294        y = fid.variables['y'][:]
2295        times = fid.variables['time'][:]
2296        print '------------------------------------------------'
2297        print 'Statistics of SWW file:'
2298        print '  Name: %s' %swwfile
2299        print '  Reference:'
2300        print '    Lower left corner: [%f, %f]'\
2301              %(xllcorner, yllcorner)
2302        print '    Start time: %f' %fid.starttime[0]
2303        print '  Extent:'
2304        print '    x [m] in [%f, %f], len(x) == %d'\
2305              %(min(x.flat), max(x.flat), len(x.flat))
2306        print '    y [m] in [%f, %f], len(y) == %d'\
2307              %(min(y.flat), max(y.flat), len(y.flat))
2308        print '    t [s] in [%f, %f], len(t) == %d'\
2309              %(min(times), max(times), len(times))
2310        print '  Quantities [SI units]:'
2311        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2312            q = fid.variables[name][:].flat
2313            print '    %s in [%f, %f]' %(name, min(q), max(q))
2314
2315
2316
2317    # Get quantity and reduce if applicable
2318    if verbose: print 'Processing quantity %s' %quantity
2319
2320    # Turn NetCDF objects into Numeric arrays
2321    quantity_dict = {}
2322    for name in fid.variables.keys():
2323        quantity_dict[name] = fid.variables[name][:]
2324
2325
2326
2327    # Convert quantity expression to quantities found in sww file   
2328    q = apply_expression_to_dictionary(quantity, quantity_dict)
2329
2330
2331
2332    if len(q.shape) == 2:
2333        # q has a time component and needs to be reduced along
2334        # the temporal dimension
2335        if verbose: print 'Reducing quantity %s' %quantity
2336        q_reduced = zeros( number_of_points, Float )
2337
2338        for k in range(number_of_points):
2339            q_reduced[k] = reduction( q[:,k] )
2340
2341        q = q_reduced
2342
2343    # Post condition: Now q has dimension: number_of_points
2344    assert len(q.shape) == 1
2345    assert q.shape[0] == number_of_points
2346
2347
2348    if verbose:
2349        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2350
2351
2352    # Create grid and update xll/yll corner and x,y
2353    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2354    assert len(vertex_points.shape) == 2
2355
2356    # Interpolate
2357    from anuga.fit_interpolate.interpolate import Interpolate
2358    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2359
2360    # Interpolate using quantity values
2361    if verbose: print 'Interpolating'
2362    interpolated_values = interp.interpolate(q, data_points).flat
2363
2364
2365    if verbose:
2366        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2367                                                      max(interpolated_values))
2368
2369    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2370    P = interp.mesh.get_boundary_polygon()
2371    outside_indices = outside_polygon(data_points, P, closed=True)
2372
2373    for i in outside_indices:
2374        interpolated_values[i] = NODATA_value
2375
2376
2377    # Store results   
2378    G = Geospatial_data(data_points=data_points,
2379                        attributes=interpolated_values)
2380
2381    G.export_points_file(ptsfile, absolute = True)
2382
2383    fid.close()
2384
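# Illustrative usage sketch (hypothetical filenames; data_points are
# absolute UTM coordinates):
#
#     from Numeric import array
#     gauges = array([[305830.0, 6189490.0],
#                     [305840.0, 6189500.0]])
#     sww2pts('runup', basename_out='runup_depth',
#             data_points=gauges, quantity='stage-elevation')
#     # -> writes runup_depth.pts with one interpolated value per gauge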
2385
2386def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2387                                  use_cache = False,
2388                                  verbose = False):
2389    """Read Digitial Elevation model from the following ASCII format (.asc)
2390
2391    Example:
2392
2393    ncols         3121
2394    nrows         1800
2395    xllcorner     722000
2396    yllcorner     5893000
2397    cellsize      25
2398    NODATA_value  -9999
2399    138.3698 137.4194 136.5062 135.5558 ..........
2400
2401    Convert basename_in + '.asc' to NetCDF format (.dem)
2402    mimicking the ASCII format closely.
2403
2404
2405    An accompanying file with same basename_in but extension .prj must exist
2406    and is used to fix the UTM zone, datum, false northings and eastings.
2407
2408    The prj format is assumed to be as
2409
2410    Projection    UTM
2411    Zone          56
2412    Datum         WGS84
2413    Zunits        NO
2414    Units         METERS
2415    Spheroid      WGS84
2416    Xshift        0.0000000000
2417    Yshift        10000000.0000000000
2418    Parameters
2419    """
2420
2421
2422
2423    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2424
2425    if use_cache is True:
2426        from caching import cache
2427        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2428                       dependencies = [basename_in + '.asc',
2429                                       basename_in + '.prj'],
2430                       verbose = verbose)
2431
2432    else:
2433        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2434
2435    return result
2436
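# Illustrative usage sketch (assumes topo.asc and topo.prj exist; the
# basename is hypothetical):
#
#     convert_dem_from_ascii2netcdf('topo', use_cache=True, verbose=True)
#     # -> writes topo.dem; with use_cache=True the cached result is
#     #    reused as long as topo.asc and topo.prj are unchanged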
2437
2438
2439
2440
2441
2442def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2443                                  verbose = False):
2444    """Read Digitial Elevation model from the following ASCII format (.asc)
2445
2446    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2447    """
2448
2449    import os
2450    from Scientific.IO.NetCDF import NetCDFFile
2451    from Numeric import Float, array
2452
2453    #root, ext = os.path.splitext(basename_in)
2454    root = basename_in
2455
2456    ###########################################
2457    # Read Meta data
2458    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2459    metadatafile = open(root + '.prj')
2460    metalines = metadatafile.readlines()
2461    metadatafile.close()
2462
2463    L = metalines[0].strip().split()
2464    assert L[0].strip().lower() == 'projection'
2465    projection = L[1].strip()                   #TEXT
2466
2467    L = metalines[1].strip().split()
2468    assert L[0].strip().lower() == 'zone'
2469    zone = int(L[1].strip())
2470
2471    L = metalines[2].strip().split()
2472    assert L[0].strip().lower() == 'datum'
2473    datum = L[1].strip()                        #TEXT
2474
2475    L = metalines[3].strip().split()
2476    assert L[0].strip().lower() == 'zunits'     #IGNORE
2477    zunits = L[1].strip()                       #TEXT
2478
2479    L = metalines[4].strip().split()
2480    assert L[0].strip().lower() == 'units'
2481    units = L[1].strip()                        #TEXT
2482
2483    L = metalines[5].strip().split()
2484    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2485    spheroid = L[1].strip()                     #TEXT
2486
2487    L = metalines[6].strip().split()
2488    assert L[0].strip().lower() == 'xshift'
2489    false_easting = float(L[1].strip())
2490
2491    L = metalines[7].strip().split()
2492    assert L[0].strip().lower() == 'yshift'
2493    false_northing = float(L[1].strip())
2494
2496
2497
2498    ###########################################
2499    #Read DEM data
2500
2501    datafile = open(basename_in + '.asc')
2502
2503    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2504    lines = datafile.readlines()
2505    datafile.close()
2506
2507    if verbose: print 'Got', len(lines), ' lines'
2508
2509    ncols = int(lines[0].split()[1].strip())
2510    nrows = int(lines[1].split()[1].strip())
2511
2512    # Do cellsize (line 4) before line 2 and 3
2513    cellsize = float(lines[4].split()[1].strip())   
2514
2515    # Checks suggested by Joaquim Luis
2516    # Our internal representation of xllcorner
2517    # and yllcorner is non-standard.
2518    xref = lines[2].split()
2519    if xref[0].strip() == 'xllcorner':
2520        xllcorner = float(xref[1].strip()) # + 0.5*cellsize # Correct offset
2521    elif xref[0].strip() == 'xllcenter':
2522        xllcorner = float(xref[1].strip())
2523    else:
2524        msg = 'Unknown keyword: %s' %xref[0].strip()
2525        raise Exception, msg
2526
2527    yref = lines[3].split()
2528    if yref[0].strip() == 'yllcorner':
2529        yllcorner = float(yref[1].strip()) # + 0.5*cellsize # Correct offset
2530    elif yref[0].strip() == 'yllcenter':
2531        yllcorner = float(yref[1].strip())
2532    else:
2533        msg = 'Unknown keyword: %s' %yref[0].strip()
2534        raise Exception, msg
2535       
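    # Clarifying note (added): ESRI grids registered with 'xllcorner' give
    # the corner of the lower-left cell, while 'xllcenter' gives its centre;
    # the two differ by half a cell, e.g. corner 722000.0 vs centre 722012.5
    # for cellsize 25. That half-cell correction is not yet applied here
    # (see the commented '+ 0.5*cellsize' offsets above).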
2536
2537    NODATA_value = int(lines[5].split()[1].strip())
2538
2539    assert len(lines) == nrows + 6
2540
2541
2542    ##########################################
2543
2544
2545    if basename_out is None:
2546        netcdfname = root + '.dem'
2547    else:
2548        netcdfname = basename_out + '.dem'
2549
2550    if verbose: print 'Store to NetCDF file %s' %netcdfname
2551    # NetCDF file definition
2552    fid = NetCDFFile(netcdfname, 'w')
2553
2554    #Create new file
2555    fid.institution = 'Geoscience Australia'
2556    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2557                      'of spatial point data'
2558
2559    fid.ncols = ncols
2560    fid.nrows = nrows
2561    fid.xllcorner = xllcorner
2562    fid.yllcorner = yllcorner
2563    fid.cellsize = cellsize
2564    fid.NODATA_value = NODATA_value
2565
2566    fid.zone = zone
2567    fid.false_easting = false_easting
2568    fid.false_northing = false_northing
2569    fid.projection = projection
2570    fid.datum = datum
2571    fid.units = units
2572
2573
2574    # dimension definitions
2575    fid.createDimension('number_of_rows', nrows)
2576    fid.createDimension('number_of_columns', ncols)
2577
2578    # variable definitions
2579    fid.createVariable('elevation', Float, ('number_of_rows',
2580                                            'number_of_columns'))
2581
2582    # Get handles to the variables
2583    elevation = fid.variables['elevation']
2584
2585    #Store data
2586    n = len(lines[6:])
2587    for i, line in enumerate(lines[6:]):
2588        fields = line.split()
2589        if verbose and i%((n+10)/10)==0:
2590            print 'Processing row %d of %d' %(i, nrows)
2591
2592        elevation[i, :] = array([float(x) for x in fields])
2593
2594    fid.close()
2595
2596
2597
2598
2599
2600def ferret2sww(basename_in, basename_out = None,
2601               verbose = False,
2602               minlat = None, maxlat = None,
2603               minlon = None, maxlon = None,
2604               mint = None, maxt = None, mean_stage = 0,
2605               origin = None, zscale = 1,
2606               fail_on_NaN = True,
2607               NaN_filler = 0,
2608               elevation = None,
2609               inverted_bathymetry = True
2610               ): #FIXME: Bathymetry should be obtained
2611                                  #from MOST somehow.
2612                                  #Alternatively from elsewhere
2613                                  #or, as a last resort,
2614                                  #specified here.
2615                                  #The value of -100 will work
2616                                  #for the Wollongong tsunami
2617                                  #scenario but is very hacky
2618    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2619    sww format native to abstract_2d_finite_volumes.
2620
2621    Specify only basename_in and read files of the form
2622    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2623    relative height, x-velocity and y-velocity, respectively.
2624
2625    Also convert latitude and longitude to UTM. All coordinates are
2626    assumed to be given in the GDA94 datum.
2627
2628    min's and max's: If omitted - full extent is used.
2629    To include a value min may equal it, while max must exceed it.
2630    Lat and lon are assumed to be in decimal degrees
2631
2632    origin is a 3-tuple with geo referenced
2633    UTM coordinates (zone, easting, northing)
2634
2635    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2636    which means that longitude is the fastest
2637    varying dimension (row major order, so to speak)
2638
2639    ferret2sww uses grid points as vertices in a triangular grid
2640    counting vertices from lower left corner upwards, then right
2641    """
2642
2643    import os
2644    from Scientific.IO.NetCDF import NetCDFFile
2645    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2646    from Numeric import allclose, around
2647
2648    precision = Float
2649
2650    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2651
2652    if minlat != None:
2653        assert -90 < minlat < 90 , msg
2654    if maxlat != None:
2655        assert -90 < maxlat < 90 , msg
2656        if minlat != None:
2657            assert maxlat > minlat
2658    if minlon != None:
2659        assert -180 < minlon < 180 , msg
2660    if maxlon != None:
2661        assert -180 < maxlon < 180 , msg
2662        if minlon != None:
2663            assert maxlon > minlon
2664       
2665
2666
2667    #Get NetCDF data
2668    if verbose: print 'Reading files %s_*.nc' %basename_in
2669    #print "basename_in + '_ha.nc'",basename_in + '_ha.nc'
2670    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2671    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2672    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2673    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2674
2675    if basename_out is None:
2676        swwname = basename_in + '.sww'
2677    else:
2678        swwname = basename_out + '.sww'
2679
2680    # Get dimensions of file_h
2681    for dimension in file_h.dimensions.keys():
2682        if dimension[:3] == 'LON':
2683            dim_h_longitude = dimension
2684        if dimension[:3] == 'LAT':
2685            dim_h_latitude = dimension
2686        if dimension[:4] == 'TIME':
2687            dim_h_time = dimension
2688
2692
2693    times = file_h.variables[dim_h_time]
2694    latitudes = file_h.variables[dim_h_latitude]
2695    longitudes = file_h.variables[dim_h_longitude]
2696   
2697    # get dimensions for file_e
2698    for dimension in file_e.dimensions.keys():
2699        if dimension[:3] == 'LON':
2700            dim_e_longitude = dimension
2701        if dimension[:3] == 'LAT':
2702            dim_e_latitude = dimension
2703
2704    # get dimensions for file_u
2705    for dimension in file_u.dimensions.keys():
2706        if dimension[:3] == 'LON':
2707            dim_u_longitude = dimension
2708        if dimension[:3] == 'LAT':
2709            dim_u_latitude = dimension
2710        if dimension[:4] == 'TIME':
2711            dim_u_time = dimension
2712           
2713    # get dimensions for file_v
2714    for dimension in file_v.dimensions.keys():
2715        if dimension[:3] == 'LON':
2716            dim_v_longitude = dimension
2717        if dimension[:3] == 'LAT':
2718            dim_v_latitude = dimension
2719        if dimension[:4] == 'TIME':
2720            dim_v_time = dimension
2721
2722
2723    # Precision used by most for lat/lon is 4 or 5 decimals
2724    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2725    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2726
2727    # Check that files are compatible
2728    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2729    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2730    assert allclose(latitudes, e_lat)
2731
2732    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2733    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2734    assert allclose(longitudes, e_lon)
2735
2736    if mint is None:
2737        jmin = 0
2738        mint = times[0]       
2739    else:
2740        jmin = searchsorted(times, mint)
2741       
2742    if maxt is None:
2743        jmax = len(times)
2744        maxt = times[-1]
2745    else:
2746        jmax = searchsorted(times, maxt)
2747
2748    #print "latitudes[:]",latitudes[:]
2749    #print "longitudes[:]",longitudes [:]
2750    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
2751                                                  longitudes[:],
2752                                                  minlat, maxlat,
2753                                                  minlon, maxlon)
2754
2755
2756    times = times[jmin:jmax]
2757    latitudes = latitudes[kmin:kmax]
2758    longitudes = longitudes[lmin:lmax]
2759
2760    #print "latitudes[:]",latitudes[:]
2761    #print "longitudes[:]",longitudes [:]
2762
2763    if verbose: print 'cropping'
2764    zname = 'ELEVATION'
2765
2766    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2767    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2768    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2769    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2770
2784
2785
2786
2787    #Get missing values
2788    nan_ha = file_h.variables['HA'].missing_value[0]
2789    nan_ua = file_u.variables['UA'].missing_value[0]
2790    nan_va = file_v.variables['VA'].missing_value[0]
2791    if hasattr(file_e.variables[zname],'missing_value'):
2792        nan_e  = file_e.variables[zname].missing_value[0]
2793    else:
2794        nan_e = None
2795
2796    #Cleanup
2797    from Numeric import sometrue
2798
2799    missing = (amplitudes == nan_ha)
2800    if sometrue (missing):
2801        if fail_on_NaN:
2802            msg = 'NetCDFFile %s contains missing values'\
2803                  %(basename_in+'_ha.nc')
2804            raise DataMissingValuesError, msg
2805        else:
2806            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2807
2808    missing = (uspeed == nan_ua)
2809    if sometrue (missing):
2810        if fail_on_NaN:
2811            msg = 'NetCDFFile %s contains missing values'\
2812                  %(basename_in+'_ua.nc')
2813            raise DataMissingValuesError, msg
2814        else:
2815            uspeed = uspeed*(missing==0) + missing*NaN_filler
2816
2817    missing = (vspeed == nan_va)
2818    if sometrue (missing):
2819        if fail_on_NaN:
2820            msg = 'NetCDFFile %s contains missing values'\
2821                  %(basename_in+'_va.nc')
2822            raise DataMissingValuesError, msg
2823        else:
2824            vspeed = vspeed*(missing==0) + missing*NaN_filler
2825
2826
2827    missing = (elevations == nan_e)
2828    if sometrue (missing):
2829        if fail_on_NaN:
2830            msg = 'NetCDFFile %s contains missing values'\
2831                  %(basename_in+'_e.nc')
2832            raise DataMissingValuesError, msg
2833        else:
2834            elevations = elevations*(missing==0) + missing*NaN_filler
2835
2836    #######
2837
2838
2839
2840    number_of_times = times.shape[0]
2841    number_of_latitudes = latitudes.shape[0]
2842    number_of_longitudes = longitudes.shape[0]
2843
2844    assert amplitudes.shape[0] == number_of_times
2845    assert amplitudes.shape[1] == number_of_latitudes
2846    assert amplitudes.shape[2] == number_of_longitudes
2847
2848    if verbose:
2849        print '------------------------------------------------'
2850        print 'Statistics:'
2851        print '  Extent (lat/lon):'
2852        print '    lat in [%f, %f], len(lat) == %d'\
2853              %(min(latitudes.flat), max(latitudes.flat),
2854                len(latitudes.flat))
2855        print '    lon in [%f, %f], len(lon) == %d'\
2856              %(min(longitudes.flat), max(longitudes.flat),
2857                len(longitudes.flat))
2858        print '    t in [%f, %f], len(t) == %d'\
2859              %(min(times.flat), max(times.flat), len(times.flat))
2860
2861        q = amplitudes.flat
2862        name = 'Amplitudes (ha) [cm]'
2863        print '    %s in [%f, %f]' %(name, min(q), max(q))
2864
2865        q = uspeed.flat
2866        name = 'Speeds (ua) [cm/s]'
2867        print '    %s in [%f, %f]' %(name, min(q), max(q))
2868
2869        q = vspeed.flat
2870        name = 'Speeds (va) [cm/s]'
2871        print '    %s in [%f, %f]' %(name, min(q), max(q))
2872
2873        q = elevations.flat
2874        name = 'Elevations (e) [m]'
2875        print '    %s in [%f, %f]' %(name, min(q), max(q))
2876
2877
2878    # print number_of_latitudes, number_of_longitudes
2879    number_of_points = number_of_latitudes*number_of_longitudes
2880    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2881
2882
2883    file_h.close()
2884    file_u.close()
2885    file_v.close()
2886    file_e.close()
2887
2888
2889    # NetCDF file definition
2890    outfile = NetCDFFile(swwname, 'w')
2891
2892    description = 'Converted from Ferret files: %s, %s, %s, %s'\
2893                  %(basename_in + '_ha.nc',
2894                    basename_in + '_ua.nc',
2895                    basename_in + '_va.nc',
2896                    basename_in + '_e.nc')
2897   
2898    # Create new file
2899    starttime = times[0]
2900    sww = Write_sww()
2901    sww.store_header(outfile, times, number_of_volumes,
2902                     number_of_points, description=description,
2903                     verbose=verbose)
2904
2905    # Store
2906    from anuga.coordinate_transforms.redfearn import redfearn
2907    x = zeros(number_of_points, Float)  #Easting
2908    y = zeros(number_of_points, Float)  #Northing
2909
2910
2911    if verbose: print 'Making triangular grid'
2912
2913    # Check zone boundaries
2914    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2915
2916    vertices = {}
2917    i = 0
2918    for k, lat in enumerate(latitudes):       #Y direction
2919        for l, lon in enumerate(longitudes):  #X direction
2920
2921            vertices[l,k] = i
2922
2923            zone, easting, northing = redfearn(lat,lon)
2924
2925            msg = 'Zone boundary crossed at longitude = %f' %lon
2926            #assert zone == refzone, msg
2927            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2928            x[i] = easting
2929            y[i] = northing
2930            i += 1
2931
2932    #Construct 2 triangles per 'rectangular' element
2933    volumes = []
2934    for l in range(number_of_longitudes-1):    #X direction
2935        for k in range(number_of_latitudes-1): #Y direction
2936            v1 = vertices[l,k+1]
2937            v2 = vertices[l,k]
2938            v3 = vertices[l+1,k+1]
2939            v4 = vertices[l+1,k]
2940
2941            volumes.append([v1,v2,v3]) #Upper element
2942            volumes.append([v4,v3,v2]) #Lower element
2943
2944    volumes = array(volumes)
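    # Clarifying note (added): each grid rectangle
    #
    #   v1 --- v3         v1 = (l, k+1)    v3 = (l+1, k+1)
    #   |    / |
    #   |   /  |
    #   v2 --- v4         v2 = (l, k)      v4 = (l+1, k)
    #
    # is split into the upper triangle (v1,v2,v3) and the lower triangle
    # (v4,v3,v2), both listed in counter-clockwise order.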
2945
2946    if origin is None:
2947        origin = Geo_reference(refzone,min(x),min(y))
2948    geo_ref = write_NetCDF_georeference(origin, outfile)
2949   
2950    if elevation is not None:
2951        z = elevation
2952    else:
2953        if inverted_bathymetry:
2954            z = -1*elevations
2955        else:
2956            z = elevations
2957    #FIXME: z should be obtained from MOST and passed in here
2958
2959    from Numeric import resize
2960    z = resize(z,outfile.variables['z'][:].shape)
2961    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
2962    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
2963    outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
2964    outfile.variables['elevation'][:] = z
2965    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
2966
2967
2968
2969    #Time stepping
2970    stage = outfile.variables['stage']
2971    xmomentum = outfile.variables['xmomentum']
2972    ymomentum = outfile.variables['ymomentum']
2973
2974    if verbose: print 'Converting quantities'
2975    n = len(times)
2976    for j in range(n):
2977        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2978        i = 0
2979        for k in range(number_of_latitudes):      #Y direction
2980            for l in range(number_of_longitudes): #X direction
2981                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2982                stage[j,i] = w
2983                h = w - z[i]
2984                xmomentum[j,i] = uspeed[j,k,l]/100*h
2985                ymomentum[j,i] = vspeed[j,k,l]/100*h
2986                i += 1
2987
2988    #outfile.close()
2989
2990    #FIXME: Refactor using code from file_function.statistics
2991    #Something like print swwstats(swwname)
2992    if verbose:
2993        x = outfile.variables['x'][:]
2994        y = outfile.variables['y'][:]
2995        print '------------------------------------------------'
2996        print 'Statistics of output file:'
2997        print '  Name: %s' %swwname
2998        print '  Reference:'
2999        print '    Lower left corner: [%f, %f]'\
3000              %(geo_ref.get_xllcorner(), geo_ref.get_yllcorner())
3001        print '    Start time: %f' %starttime
3002        print '    Min time: %f' %mint
3003        print '    Max time: %f' %maxt
3004        print '  Extent:'
3005        print '    x [m] in [%f, %f], len(x) == %d'\
3006              %(min(x.flat), max(x.flat), len(x.flat))
3007        print '    y [m] in [%f, %f], len(y) == %d'\
3008              %(min(y.flat), max(y.flat), len(y.flat))
3009        print '    t [s] in [%f, %f], len(t) == %d'\
3010              %(min(times), max(times), len(times))
3011        print '  Quantities [SI units]:'
3012        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
3013            q = outfile.variables[name][:].flat
3014            print '    %s in [%f, %f]' %(name, min(q), max(q))
3015
3016
3017
3018    outfile.close()
3019
3020
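# Illustrative usage sketch (assumes MOST output files most_ha.nc,
# most_ua.nc, most_va.nc and most_e.nc; the basename is hypothetical):
#
#     ferret2sww('most', basename_out='most_boundary',
#                minlat=-34.5, maxlat=-33.5, minlon=150.5, maxlon=151.5,
#                mean_stage=0.0, zscale=1, verbose=True)
#     # -> writes most_boundary.sww on a triangulated lat/lon grid
#     #    converted to UTM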
3021
3022
3023
3024def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False):
3025    """Template for converting typical text files with time series to
3026    NetCDF tms file.
3027
3028
3029    The file format is assumed to be either two fields separated by a comma:
3030
3031        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3032
3033    E.g
3034
3035      31/08/04 00:00:00, 1.328223 0 0
3036      31/08/04 00:15:00, 1.292912 0 0
3037
3038    or time (seconds), value0 value1 value2 ...
3039   
3040      0.0, 1.328223 0 0
3041      0.1, 1.292912 0 0
3042
3043    will provide a time dependent function f(t) with three attributes
3044
3045    filename is assumed to be the rootname, with extensions .txt (input) and .tms (output)
3046    """
3047
3048    import time, calendar
3049    from Numeric import array
3050    from anuga.config import time_format
3051    from anuga.utilities.numerical_tools import ensure_numeric
3052
3053
3054
3055    fid = open(filename + '.txt')
3056    line = fid.readline()
3057    fid.close()
3058
3059    fields = line.split(',')
3060    msg = 'File %s must have the format: date, value0 value1 value2 ...' %filename
3061    assert len(fields) == 2, msg
3062
3063    if not time_as_seconds:
3064        try:
3065            starttime = calendar.timegm(time.strptime(fields[0], time_format))
3066        except ValueError:
3067            msg = 'First field in file %s must be' %filename
3068            msg += ' date-time with format %s.\n' %time_format
3069            msg += 'I got %s instead.' %fields[0]
3070            raise DataTimeError, msg
3071    else:
3072        try:
3073            starttime = float(fields[0])
3074        except ValueError:
3075            msg = 'First field in file %s must be a number (time in seconds)' %filename
3076            raise DataTimeError, msg
3077
3078
3079    #Split values
3080    values = []
3081    for value in fields[1].split():
3082        values.append(float(value))
3083
3084    q = ensure_numeric(values)
3085
3086    msg = 'ERROR: File must contain at least one independent value'
3087    assert len(q.shape) == 1, msg
3088
3089
3090
3091    #Read times proper
3092    from Numeric import zeros, Float, alltrue
3093    from anuga.config import time_format
3094    import time, calendar
3095
3096    fid = open(filename + '.txt')
3097    lines = fid.readlines()
3098    fid.close()
3099
3100    N = len(lines)
3101    d = len(q)
3102
3103    T = zeros(N, Float)       #Time
3104    Q = zeros((N, d), Float)  #Values
3105
3106    for i, line in enumerate(lines):
3107        fields = line.split(',')
3108        if not time_as_seconds:
3109            realtime = calendar.timegm(time.strptime(fields[0], time_format))
3110        else:
3111            realtime = float(fields[0])
3112        T[i] = realtime - starttime
3113
3114        for j, value in enumerate(fields[1].split()):
3115            Q[i, j] = float(value)
3116
3117    msg = 'File %s must list time as a monotonically ' %filename
3118    msg += 'increasing sequence'
3119    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3120
3121    #Create NetCDF file
3122    from Scientific.IO.NetCDF import NetCDFFile
3123
3124    fid = NetCDFFile(filename + '.tms', 'w')
3125
3126
3127    fid.institution = 'Geoscience Australia'
3128    fid.description = 'Time series'
3129
3130
3131    #Reference point
3132    #Start time in seconds since the epoch (midnight 1/1/1970)
3133    #FIXME: Use Georef
3134    fid.starttime = starttime
3135
3136    # dimension definitions
3137    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3138    #fid.createDimension('number_of_vertices', 3)
3139
3140
3141    fid.createDimension('number_of_timesteps', len(T))
3142
3143    fid.createVariable('time', Float, ('number_of_timesteps',))
3144
3145    fid.variables['time'][:] = T
3146
3147    for i in range(Q.shape[1]):
3148        try:
3149            name = quantity_names[i]
3150        except (TypeError, IndexError):
3151            # quantity_names is None or has fewer entries than columns
3152            name = 'Attribute%d'%i
3152
3153        fid.createVariable(name, Float, ('number_of_timesteps',))
3154        fid.variables[name][:] = Q[:,i]
3155
3156    fid.close()
3157
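# Illustrative usage sketch (hypothetical file tide.txt containing lines
# like '31/08/04 00:00:00, 1.328223 0 0'):
#
#     timefile2netcdf('tide',
#                     quantity_names=['stage', 'xmomentum', 'ymomentum'])
#     # -> writes tide.tms with a 'time' variable and one variable
#     #    per value column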
3158
3159def extent_sww(file_name):
3160    """
3161    Read in an sww file.
3162
3163    Input;
3164    file_name - the sww file
3165
3166    Output;
3167    z - Vector of bed elevation
3168    volumes - Array.  Each row has 3 values, representing
3169    the vertices that define the volume
3170    time - Vector of the times where there is stage information
3171    stage - array with respect to time and vertices (x,y)
3172    """
3173
3174
3175    from Scientific.IO.NetCDF import NetCDFFile
3176
3177    #Check contents
3178    #Get NetCDF
3179    fid = NetCDFFile(file_name, 'r')
3180
3181    # Get the variables
3182    x = fid.variables['x'][:]
3183    y = fid.variables['y'][:]
3184    stage = fid.variables['stage'][:]
3185    #print "stage",stage
3186    #print "stage.shap",stage.shape
3187    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3188    #print "min(stage)",min(stage)
3189
3190    fid.close()
3191
3192    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3193
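# Illustrative usage sketch (hypothetical filename):
#
#     xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('runup.sww')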
3194
3195def sww2domain(filename, boundary=None, t=None,
3196               fail_if_NaN=True, NaN_filler=0,
3197               verbose=False, very_verbose=False):
3198    """
3199    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3200
3201    Boundary is not recommended if domain.smooth is not selected, as it
3202    uses unique coordinates, but not unique boundaries. This means that
3203    the boundary file will not be compatible with the coordinates, and will
3204    give a different final boundary, or crash.
3205    """
3206    NaN = 9.969209968386869e+036  # Default NetCDF fill value, used to mark missing data
3208
3209    from Scientific.IO.NetCDF import NetCDFFile
3210    from shallow_water import Domain
3211    from Numeric import asarray, transpose, resize
3212
3213    if verbose: print 'Reading from ', filename
3214    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3215    time = fid.variables['time']       #Timesteps
3216    if t is None:
3217        t = time[-1]
3218    time_interp = get_time_interp(time,t)
3219
3220    # Get the variables as Numeric arrays
3221    x = fid.variables['x'][:]             #x-coordinates of vertices
3222    y = fid.variables['y'][:]             #y-coordinates of vertices
3223    elevation = fid.variables['elevation']     #Elevation
3224    stage = fid.variables['stage']     #Water level
3225    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3226    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3227
3228    starttime = fid.starttime[0]
3229    volumes = fid.variables['volumes'][:] #Connectivity
3230    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3231
3232    conserved_quantities = []
3233    interpolated_quantities = {}
3234    other_quantities = []
3235
    # Get geo_reference (sww files don't have to have one)
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #Absence of georeferencing attributes raises AttributeError
        geo_reference = None
3242
3243    if verbose: print '    getting quantities'
3244    for quantity in fid.variables.keys():
3245        dimensions = fid.variables[quantity].dimensions
3246        if 'number_of_timesteps' in dimensions:
3247            conserved_quantities.append(quantity)
3248            interpolated_quantities[quantity]=\
3249                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3250        else: other_quantities.append(quantity)
3251
3252    other_quantities.remove('x')
3253    other_quantities.remove('y')
3254    other_quantities.remove('z')
3255    other_quantities.remove('volumes')
    # Range variables may not be present in all sww files
    for quantity in ['stage_range', 'xmomentum_range',
                     'ymomentum_range', 'elevation_range']:
        if quantity in other_quantities:
            other_quantities.remove(quantity)

3264
3265    conserved_quantities.remove('time')
3266
3267    if verbose: print '    building domain'
3268    #    From domain.Domain:
3269    #    domain = Domain(coordinates, volumes,\
3270    #                    conserved_quantities = conserved_quantities,\
3271    #                    other_quantities = other_quantities,zone=zone,\
3272    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3273
3274    #   From shallow_water.Domain:
3275    coordinates=coordinates.tolist()
3276    volumes=volumes.tolist()
3277    #FIXME:should this be in mesh?(peter row)
3278    if fid.smoothing == 'Yes': unique = False
3279    else: unique = True
3280    if unique:
3281        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3282
3283
3284    try:
3285        domain = Domain(coordinates, volumes, boundary)
3286    except AssertionError, e:
3287        fid.close()
3288        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3289        raise DataDomainError, msg
3290
    if boundary is not None:
3292        domain.boundary = boundary
3293
3294    domain.geo_reference = geo_reference
3295
3296    domain.starttime=float(starttime)+float(t)
3297    domain.time=0.0
3298
3299    for quantity in other_quantities:
3300        try:
3301            NaN = fid.variables[quantity].missing_value
3302        except:
3303            pass #quantity has no missing_value number
3304        X = fid.variables[quantity][:]
3305        if very_verbose:
3306            print '       ',quantity
3307            print '        NaN =',NaN
3308            print '        max(X)'
3309            print '       ',max(X)
3310            print '        max(X)==NaN'
3311            print '       ',max(X)==NaN
3312            print ''
3313        if (max(X)==NaN) or (min(X)==NaN):
3314            if fail_if_NaN:
3315                msg = 'quantity "%s" contains no_data entry'%quantity
3316                raise DataMissingValuesError, msg
3317            else:
                data = (X != NaN)
3319                X = (X*data)+(data==0)*NaN_filler
3320        if unique:
3321            X = resize(X,(len(X)/3,3))
3322        domain.set_quantity(quantity,X)
3323    #
3324    for quantity in conserved_quantities:
3325        try:
3326            NaN = fid.variables[quantity].missing_value
3327        except:
3328            pass #quantity has no missing_value number
3329        X = interpolated_quantities[quantity]
3330        if very_verbose:
3331            print '       ',quantity
3332            print '        NaN =',NaN
3333            print '        max(X)'
3334            print '       ',max(X)
3335            print '        max(X)==NaN'
3336            print '       ',max(X)==NaN
3337            print ''
3338        if (max(X)==NaN) or (min(X)==NaN):
3339            if fail_if_NaN:
3340                msg = 'quantity "%s" contains no_data entry'%quantity
3341                raise DataMissingValuesError, msg
3342            else:
                data = (X != NaN)
3344                X = (X*data)+(data==0)*NaN_filler
3345        if unique:
3346            X = resize(X,(X.shape[0]/3,3))
3347        domain.set_quantity(quantity,X)
3348
3349    fid.close()
3350    return domain
3351
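# Example usage (a sketch; 'runup.sww' is a hypothetical file name):
#
#     domain = sww2domain('runup.sww', verbose=True)     # use last timestep
#     domain = sww2domain('runup.sww', t=300.0)          # interpolate to t=300s
#     domain = sww2domain('runup.sww', fail_if_NaN=False, NaN_filler=0.0)
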
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a saved quantity in time, given the (index, ratio)
    pair produced by get_time_interp.
    """
    index, ratio = time_interp
3356    Q = saved_quantity
3357    if ratio > 0:
3358        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3359    else:
3360        q = Q[index]
3361    #Return vector of interpolated values
3362    return q
3363
def get_time_interp(time, t=None):
    """Find the index and interpolation ratio for time t.
    Borrowed from previous abstract_2d_finite_volumes code.
    """
3367    if t is None:
3368        t=time[-1]
3369        index = -1
3370        ratio = 0.
3371    else:
3372        T = time
3373        tau = t
3374        index=0
3375        msg = 'Time interval derived from file %s [%s:%s]'\
3376            %('FIXMEfilename', T[0], T[-1])
3377        msg += ' does not match model time: %s' %tau
3378        if tau < time[0]: raise DataTimeError, msg
3379        if tau > time[-1]: raise DataTimeError, msg
3380        while tau > time[index]: index += 1
3381        while tau < time[index]: index -= 1
3382        if tau == time[index]:
3383            #Protect against case where tau == time[-1] (last time)
3384            # - also works in general when tau == time[i]
3385            ratio = 0
3386        else:
3387            #t is now between index and index+1
3388            ratio = (tau - time[index])/(time[index+1] - time[index])
3389    return (index,ratio)
3390
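# Worked example (a sketch): for time = [0.0, 10.0, 20.0] and t = 15.0,
# get_time_interp returns (index=1, ratio=0.5), and interpolated_quantity
# then computes 0.5*Q[1] + 0.5*Q[2].
#
#     index, ratio = get_time_interp([0.0, 10.0, 20.0], t=15.0)   # (1, 0.5)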
3391
3392def weed(coordinates,volumes,boundary = None):
3393    if type(coordinates)==ArrayType:
3394        coordinates = coordinates.tolist()
3395    if type(volumes)==ArrayType:
3396        volumes = volumes.tolist()
3397
3398    unique = False
3399    point_dict = {}
3400    same_point = {}
3401    for i in range(len(coordinates)):
3402        point = tuple(coordinates[i])
3403        if point_dict.has_key(point):
3404            unique = True
3405            same_point[i]=point
3406            #to change all point i references to point j
3407        else:
3408            point_dict[point]=i
3409            same_point[i]=point
3410
3411    coordinates = []
3412    i = 0
3413    for point in point_dict.keys():
3414        point = tuple(point)
3415        coordinates.append(list(point))
3416        point_dict[point]=i
3417        i+=1
3418
3419
3420    for volume in volumes:
3421        for i in range(len(volume)):
3422            index = volume[i]
3423            if index>-1:
3424                volume[i]=point_dict[same_point[index]]
3425
3426    new_boundary = {}
3427    if not boundary is None:
3428        for segment in boundary.keys():
3429            point0 = point_dict[same_point[segment[0]]]
3430            point1 = point_dict[same_point[segment[1]]]
3431            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')? (peter row)
3434
3435            if new_boundary.has_key((point0,point1)):
3436                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3437                                              #+','+label
3438
3439            elif new_boundary.has_key((point1,point0)):
3440                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3441                                              #+','+label
3442            else: new_boundary[(point0,point1)]=label
3443
3444        boundary = new_boundary
3445
3446    return coordinates,volumes,boundary
3447
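# Example (a sketch): a duplicated coordinate is weeded out and the
# triangles are rewired to reference a single copy of it.
#
#     coords = [[0,0], [1,0], [0,1], [1,0]]    # point 3 duplicates point 1
#     vols = [[0,1,2], [2,3,0]]
#     coords, vols, _ = weed(coords, vols)     # len(coords) == 3 afterwards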
3448
3449def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3450                 verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)
3452
3453    Example:
3454
3455    ncols         3121
3456    nrows         1800
3457    xllcorner     722000
3458    yllcorner     5893000
3459    cellsize      25
3460    NODATA_value  -9999
3461    138.3698 137.4194 136.5062 135.5558 ..........
3462
3463    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3464    """
3465
3466    import os
3467    from Scientific.IO.NetCDF import NetCDFFile
3468    from Numeric import Float, zeros, sum, reshape, equal
3469
3470    root = basename_in
3471    inname = root + '.dem'
3472
3473    #Open existing netcdf file to read
3474    infile = NetCDFFile(inname, 'r')
3475    if verbose: print 'Reading DEM from %s' %inname
3476
3477    #Read metadata
3478    ncols = infile.ncols[0]
3479    nrows = infile.nrows[0]
3480    xllcorner = infile.xllcorner[0]
3481    yllcorner = infile.yllcorner[0]
3482    cellsize = infile.cellsize[0]
3483    NODATA_value = infile.NODATA_value[0]
3484    zone = infile.zone[0]
3485    false_easting = infile.false_easting[0]
3486    false_northing = infile.false_northing[0]
3487    projection = infile.projection
3488    datum = infile.datum
3489    units = infile.units
3490
3491    dem_elevation = infile.variables['elevation']
3492
3493    #Get output file name
    if basename_out is None:
3495        outname = root + '_' + repr(cellsize_new) + '.dem'
3496    else:
3497        outname = basename_out + '.dem'
3498
3499    if verbose: print 'Write decimated NetCDF file to %s' %outname
3500
3501    #Determine some dimensions for decimated grid
3502    (nrows_stencil, ncols_stencil) = stencil.shape
3503    x_offset = ncols_stencil / 2
3504    y_offset = nrows_stencil / 2
3505    cellsize_ratio = int(cellsize_new / cellsize)
3506    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3507    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3508
3509    #Open netcdf file for output
3510    outfile = NetCDFFile(outname, 'w')
3511
3512    #Create new file
3513    outfile.institution = 'Geoscience Australia'
3514    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3515                           'of spatial point data'
3516    #Georeferencing
3517    outfile.zone = zone
3518    outfile.projection = projection
3519    outfile.datum = datum
3520    outfile.units = units
3521
3522    outfile.cellsize = cellsize_new
3523    outfile.NODATA_value = NODATA_value
3524    outfile.false_easting = false_easting
3525    outfile.false_northing = false_northing
3526
3527    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3528    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3529    outfile.ncols = ncols_new
3530    outfile.nrows = nrows_new
3531
3532    # dimension definition
3533    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3534
3535    # variable definition
3536    outfile.createVariable('elevation', Float, ('number_of_points',))
3537
3538    # Get handle to the variable
3539    elevation = outfile.variables['elevation']
3540
3541    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3542
3543    #Store data
3544    global_index = 0
3545    for i in range(nrows_new):
3546        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3547        lower_index = global_index
3548        telev =  zeros(ncols_new, Float)
3549        local_index = 0
3550        trow = i * cellsize_ratio
3551
3552        for j in range(ncols_new):
3553            tcol = j * cellsize_ratio
3554            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3555
3556            #if dem contains 1 or more NODATA_values set value in
3557            #decimated dem to NODATA_value, else compute decimated
3558            #value using stencil
3559            if sum(sum(equal(tmp, NODATA_value))) > 0:
3560                telev[local_index] = NODATA_value
3561            else:
3562                telev[local_index] = sum(sum(tmp * stencil))
3563
3564            global_index += 1
3565            local_index += 1
3566
3567        upper_index = global_index
3568
3569        elevation[lower_index:upper_index] = telev
3570
3571    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3572
3573    infile.close()
3574    outfile.close()
3575
3576
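# Example usage (a sketch; 'topo' is a hypothetical basename and the 2x2
# averaging stencil decimates a 25m DEM to 50m):
#
#     from Numeric import ones, Float
#     stencil = ones((2,2), Float)/4.0    # simple block average
#     decimate_dem('topo', stencil, cellsize_new=50, verbose=True)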
3577
3578
def tsh2sww(filename, verbose=False): #test_tsh2sww
    """
    Convert a tsh/msh file to an sww file, to check that the mesh 'looks' good.
    """


    if verbose: print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose: print "Number of triangles = ", len(domain)
3588
3589    domain.smooth = True
3590    domain.format = 'sww'   #Native netcdf visualisation format
3591    file_path, filename = path.split(filename)
3592    filename, ext = path.splitext(filename)
3593    domain.set_name(filename)   
3594    domain.reduction = mean
    if verbose: print "file_path", file_path
    if file_path == "": file_path = "."
3597    domain.set_datadir(file_path)
3598
    if verbose:
3600        print "Output written to " + domain.get_datadir() + sep + \
3601              domain.get_name() + "." + domain.format
3602    sww = get_dataobject(domain)
3603    sww.store_connectivity()
3604    sww.store_timestep('stage')
3605
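# Example usage (a sketch; 'mesh.tsh' is a hypothetical mesh file):
#
#     tsh2sww('mesh.tsh', verbose=True)   # writes mesh.sww next to the input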
3606
3607def asc_csiro2sww(bath_dir,
3608                  elevation_dir,
3609                  ucur_dir,
3610                  vcur_dir,
3611                  sww_file,
3612                  minlat = None, maxlat = None,
3613                  minlon = None, maxlon = None,
3614                  zscale=1,
3615                  mean_stage = 0,
3616                  fail_on_NaN = True,
3617                  elevation_NaN_filler = 0,
3618                  bath_prefix='ba',
3619                  elevation_prefix='el',
3620                  verbose=False):
    """
    Produce an sww boundary file from esri ascii data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    assume:
    All files are in esri ascii format

    4 types of information
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions
    The metadata of all the files is the same
    Each type is in a separate directory
    One bath file with extension .000
    The time period is less than 24hrs and uniform.
    """
3642    from Scientific.IO.NetCDF import NetCDFFile
3643
3644    from anuga.coordinate_transforms.redfearn import redfearn
3645
3646    precision = Float # So if we want to change the precision its done here
3647
    # Go into the bath dir and load the only file
3649    bath_files = os.listdir(bath_dir)
3650
3651    bath_file = bath_files[0]
3652    bath_dir_file =  bath_dir + os.sep + bath_file
3653    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3654
3655    #Use the date.time of the bath file as a basis for
3656    #the start time for other files
3657    base_start = bath_file[-12:]
3658
3659    #go into the elevation dir and load the 000 file
3660    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3661                         + base_start
3662
3663    elevation_files = os.listdir(elevation_dir)
3664    ucur_files = os.listdir(ucur_dir)
3665    vcur_files = os.listdir(vcur_dir)
    elevation_files.sort()
    # the first elevation file should be the
    # file with the same base name as the bath data
    assert elevation_files[0] == elevation_prefix + base_start
3670
3671    number_of_latitudes = bath_grid.shape[0]
3672    number_of_longitudes = bath_grid.shape[1]
3673    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3674
3675    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3676                  for x in range(number_of_longitudes)]
3677    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3678                 for y in range(number_of_latitudes)]
3679
    # Reverse order of lat, so the first lat represents the first grid row
3681    latitudes.reverse()
3682
3683    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
3684                                                 minlat=minlat, maxlat=maxlat,
3685                                                 minlon=minlon, maxlon=maxlon)
3686
3687
3688    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3689    latitudes = latitudes[kmin:kmax]
3690    longitudes = longitudes[lmin:lmax]
3691    number_of_latitudes = len(latitudes)
3692    number_of_longitudes = len(longitudes)
3693    number_of_times = len(os.listdir(elevation_dir))
3694    number_of_points = number_of_latitudes*number_of_longitudes
3695    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3696
3697    #Work out the times
3698    if len(elevation_files) > 1:
3699        # Assume: The time period is less than 24hrs.
3700        time_period = (int(elevation_files[1][-3:]) - \
3701                      int(elevation_files[0][-3:]))*60*60
3702        times = [x*time_period for x in range(len(elevation_files))]
3703    else:
3704        times = [0.0]
3705
3706
3707    if verbose:
3708        print '------------------------------------------------'
3709        print 'Statistics:'
3710        print '  Extent (lat/lon):'
3711        print '    lat in [%f, %f], len(lat) == %d'\
3712              %(min(latitudes), max(latitudes),
3713                len(latitudes))
3714        print '    lon in [%f, %f], len(lon) == %d'\
3715              %(min(longitudes), max(longitudes),
3716                len(longitudes))
3717        print '    t in [%f, %f], len(t) == %d'\
3718              %(min(times), max(times), len(times))
3719
3720    ######### WRITE THE SWW FILE #############
3721    # NetCDF file definition
3722    outfile = NetCDFFile(sww_file, 'w')
3723
3724    #Create new file
3725    outfile.institution = 'Geoscience Australia'
3726    outfile.description = 'Converted from XXX'
3727
3728
3729    #For sww compatibility
3730    outfile.smoothing = 'Yes'
3731    outfile.order = 1
3732
3733    #Start time in seconds since the epoch (midnight 1/1/1970)
3734    outfile.starttime = starttime = times[0]
3735
3736
3737    # dimension definitions
3738    outfile.createDimension('number_of_volumes', number_of_volumes)
3739
3740    outfile.createDimension('number_of_vertices', 3)
3741    outfile.createDimension('number_of_points', number_of_points)
3742    outfile.createDimension('number_of_timesteps', number_of_times)
3743
3744    # variable definitions
3745    outfile.createVariable('x', precision, ('number_of_points',))
3746    outfile.createVariable('y', precision, ('number_of_points',))
3747    outfile.createVariable('elevation', precision, ('number_of_points',))
3748
3749    #FIXME: Backwards compatibility
3750    outfile.createVariable('z', precision, ('number_of_points',))
3751    #################################
3752
3753    outfile.createVariable('volumes', Int, ('number_of_volumes',
3754                                            'number_of_vertices'))
3755
3756    outfile.createVariable('time', precision,
3757                           ('number_of_timesteps',))
3758
3759    outfile.createVariable('stage', precision,
3760                           ('number_of_timesteps',
3761                            'number_of_points'))
3762
3763    outfile.createVariable('xmomentum', precision,
3764                           ('number_of_timesteps',
3765                            'number_of_points'))
3766
3767    outfile.createVariable('ymomentum', precision,
3768                           ('number_of_timesteps',
3769                            'number_of_points'))
3770
3771    #Store
3773    x = zeros(number_of_points, Float)  #Easting
3774    y = zeros(number_of_points, Float)  #Northing
3775
3776    if verbose: print 'Making triangular grid'
3777    #Get zone of 1st point.
3778    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3779
3780    vertices = {}
3781    i = 0
3782    for k, lat in enumerate(latitudes):
3783        for l, lon in enumerate(longitudes):
3784
3785            vertices[l,k] = i
3786
3787            zone, easting, northing = redfearn(lat,lon)
3788
            msg = 'Zone boundary crossed at longitude = %s' %lon
3790            #assert zone == refzone, msg
3791            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3792            x[i] = easting
3793            y[i] = northing
3794            i += 1
3795
3796
3797    #Construct 2 triangles per 'rectangular' element
3798    volumes = []
3799    for l in range(number_of_longitudes-1):    #X direction
3800        for k in range(number_of_latitudes-1): #Y direction
3801            v1 = vertices[l,k+1]
3802            v2 = vertices[l,k]
3803            v3 = vertices[l+1,k+1]
3804            v4 = vertices[l+1,k]
3805
            #Note, this is different to the ferret2sww code
            #since the order of the lats is reversed.
3808            volumes.append([v1,v3,v2]) #Upper element
3809            volumes.append([v4,v2,v3]) #Lower element
3810
3811    volumes = array(volumes)
3812
3813    geo_ref = Geo_reference(refzone,min(x),min(y))
3814    geo_ref.write_NetCDF(outfile)
3815
3816    # This will put the geo ref in the middle
3817    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3818
3819
3820    if verbose:
3821        print '------------------------------------------------'
3822        print 'More Statistics:'
        print '  Extent (x/y):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y),
                len(y))
3830        print 'geo_ref: ',geo_ref
3831
3832    z = resize(bath_grid,outfile.variables['z'][:].shape)
3833    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3834    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3835    outfile.variables['z'][:] = z
3836    outfile.variables['elevation'][:] = z  #FIXME HACK
3837    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3838
3839    stage = outfile.variables['stage']
3840    xmomentum = outfile.variables['xmomentum']
3841    ymomentum = outfile.variables['ymomentum']
3842
3843    outfile.variables['time'][:] = times   #Store time relative
3844
3845    if verbose: print 'Converting quantities'
3846    n = number_of_times
3847    for j in range(number_of_times):
3848        # load in files
3849        elevation_meta, elevation_grid = \
3850           _read_asc(elevation_dir + os.sep + elevation_files[j])
3851
3852        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3853        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3854
3855        #cut matrix to desired size
3856        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3857        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3858        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3859       
3860        # handle missing values
3861        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue(missing):
3863            if fail_on_NaN:
3864                msg = 'File %s contains missing values'\
3865                      %(elevation_files[j])
3866                raise DataMissingValuesError, msg
3867            else:
3868                elevation_grid = elevation_grid*(missing==0) + \
3869                                 missing*elevation_NaN_filler
3870
3871
3872        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3873        i = 0
3874        for k in range(number_of_latitudes):      #Y direction
3875            for l in range(number_of_longitudes): #X direction
3876                w = zscale*elevation_grid[k,l] + mean_stage
3877                stage[j,i] = w
3878                h = w - z[i]
3879                xmomentum[j,i] = u_momentum_grid[k,l]*h
3880                ymomentum[j,i] = v_momentum_grid[k,l]*h
3881                i += 1
3882    outfile.close()
3883
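# Example usage (a sketch; all directory, file names and extents are
# hypothetical):
#
#     asc_csiro2sww('bathys', 'elevations', 'ucur', 'vcur', 'out.sww',
#                   minlat=-38.5, maxlat=-38.0, minlon=144.5, maxlon=145.0,
#                   verbose=True)
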
3884def _get_min_max_indexes(latitudes_ref,longitudes_ref,
3885                        minlat=None, maxlat=None,
3886                        minlon=None, maxlon=None):
    """
    Return min, max indexes (for slicing) of the lat and long arrays to
    cover the area specified with min/max lat/long.

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assert longitudes are sorted,
    long - from low to high (west to east, eg 148 - 151)
    Assert latitudes are sorted, ascending or descending
    """
3900    latitudes = latitudes_ref[:]
3901    longitudes = longitudes_ref[:]
3902
3903    latitudes = ensure_numeric(latitudes)
3904    longitudes = ensure_numeric(longitudes)
3905   
3906    assert allclose(sort(longitudes), longitudes)
3907   
3908    lat_ascending = True
3909    if not allclose(sort(latitudes), latitudes):
3910        lat_ascending = False
3911        # reverse order of lat, so it's in ascending order         
3912        latitudes = latitudes[::-1]
3913        assert allclose(sort(latitudes), latitudes)
3914    #print "latitudes  in funct", latitudes
3915   
3916    largest_lat_index = len(latitudes)-1
3917    #Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0
3924
3925
    if maxlat is None:
        lat_max_index = largest_lat_index
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)
3944
    # Reversing the indexes, if the lat array is descending
3946    if lat_ascending is False:
3947        lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3948                                       largest_lat_index - lat_min_index
3949    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3950    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3951
3952    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3953
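# Worked example (a sketch): for latitudes [-36, -35, -34, -33] and
# longitudes [148, 149, 150], the slice indices below just cover the
# requested area:
#
#     kmin, kmax, lmin, lmax = _get_min_max_indexes(
#         [-36, -35, -34, -33], [148, 149, 150],
#         minlat=-35.5, maxlat=-34.5, minlon=148.5, maxlon=149.5)
#     # -> (0, 3, 0, 3)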
3954
3955def _read_asc(filename, verbose=False):
3956    """Read esri file from the following ASCII format (.asc)
3957
3958    Example:
3959
3960    ncols         3121
3961    nrows         1800
3962    xllcorner     722000
3963    yllcorner     5893000
3964    cellsize      25
3965    NODATA_value  -9999
3966    138.3698 137.4194 136.5062 135.5558 ..........
3967
3968    """
3969
3970    datafile = open(filename)
3971
3972    if verbose: print 'Reading DEM from %s' %(filename)
3973    lines = datafile.readlines()
3974    datafile.close()
3975
3976    if verbose: print 'Got', len(lines), ' lines'
3977
3978    ncols = int(lines.pop(0).split()[1].strip())
3979    nrows = int(lines.pop(0).split()[1].strip())
3980    xllcorner = float(lines.pop(0).split()[1].strip())
3981    yllcorner = float(lines.pop(0).split()[1].strip())
3982    cellsize = float(lines.pop(0).split()[1].strip())
3983    NODATA_value = float(lines.pop(0).split()[1].strip())
3984
3985    assert len(lines) == nrows
3986
3987    #Store data
3988    grid = []
3989
3990    n = len(lines)
3991    for i, line in enumerate(lines):
3992        cells = line.split()
3993        assert len(cells) == ncols
3994        grid.append(array([float(x) for x in cells]))
3995    grid = array(grid)
3996
3997    return {'xllcorner':xllcorner,
3998            'yllcorner':yllcorner,
3999            'cellsize':cellsize,
4000            'NODATA_value':NODATA_value}, grid
4001
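# Example usage (a sketch; 'bathy.asc' is a hypothetical esri ascii file):
#
#     metadata, grid = _read_asc('bathy.asc', verbose=True)
#     print metadata['cellsize'], grid.shape   # e.g. 25.0, (nrows, ncols)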
4002
4003
4004    ####  URS 2 SWW  ###
4005
4006lon_name = 'LON'
4007lat_name = 'LAT'
4008time_name = 'TIME'
4009precision = Float # So if we want to change the precision its done here       
4010class Write_nc:
4011    """
4012    Write an nc file.
4013   
4014    Note, this should be checked to meet cdc netcdf conventions for gridded
4015    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4016   
4017    """
4018    def __init__(self,
4019                 quantity_name,
4020                 file_name,
4021                 time_step_count,
4022                 time_step,
4023                 lon,
4024                 lat):
4025        """
4026        time_step_count is the number of time steps.
4027        time_step is the time step size
4028       
        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
4030        """
4031        self.quantity_name = quantity_name
        quantity_units = {'HA':'CENTIMETERS',
                          'UA':'CENTIMETERS/SECOND',
                          'VA':'CENTIMETERS/SECOND'}

        multiplier_dic = {'HA':100.0,  # To convert from m to cm
                          'UA':100.0,  # and from m/s to cm/sec
                          'VA':-100.0} # MUX files have positive x in the
                                       # southern direction.  This corrects
                                       # for it when writing nc files.

        self.quantity_multiplier = multiplier_dic[self.quantity_name]
4042       
4043        #self.file_name = file_name
4044        self.time_step_count = time_step_count
4045        self.time_step = time_step
4046
4047        # NetCDF file definition
4048        self.outfile = NetCDFFile(file_name, 'w')
4049        outfile = self.outfile       
4050
4051        #Create new file
4052        nc_lon_lat_header(outfile, lon, lat)
4053   
4054        # TIME
4055        outfile.createDimension(time_name, None)
4056        outfile.createVariable(time_name, precision, (time_name,))
4057
4058        #QUANTITY
4059        outfile.createVariable(self.quantity_name, precision,
4060                               (time_name, lat_name, lon_name))
4061        outfile.variables[self.quantity_name].missing_value=-1.e+034
4062        outfile.variables[self.quantity_name].units= \
4063                                 quantity_units[self.quantity_name]
4064        outfile.variables[lon_name][:]= ensure_numeric(lon)
4065        outfile.variables[lat_name][:]= ensure_numeric(lat)
4066
        #Assume no one will want to read this while we are writing
4068        #outfile.close()
4069       
4070    def store_timestep(self, quantity_slice):
4071        """
4072        Write a time slice of quantity info
4073        quantity_slice is the data to be stored at this time step
4074        """
4075       
4076        outfile = self.outfile
4077       
4078        # Get the variables
4079        time = outfile.variables[time_name]
4080        quantity = outfile.variables[self.quantity_name]
4081           
4082        i = len(time)
4083
4084        #Store time
4085        time[i] = i*self.time_step #self.domain.time
4086        quantity[i,:] = quantity_slice* self.quantity_multiplier
4087       
4088    def close(self):
4089        self.outfile.close()
4090
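# Example usage (a sketch; file name and values are made up, and zeros/Float
# come from Numeric, imported at the top of this module):
#
#     lon = [150.0, 150.1, 150.2]
#     lat = [-35.2, -35.1, -35.0]
#     nc = Write_nc('HA', 'ha_example.nc', time_step_count=2,
#                   time_step=60.0, lon=lon, lat=lat)
#     for i in range(2):
#         nc.store_timestep(zeros((len(lat), len(lon)), Float))
#     nc.close()
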
4091def urs2sww(basename_in='o', basename_out=None, verbose=False,
4092            remove_nc_files=True,
4093            minlat=None, maxlat=None,
4094            minlon= None, maxlon=None,
4095            mint=None, maxt=None,
4096            mean_stage=0,
4097            origin = None,
4098            zscale=1,
4099            fail_on_NaN=True,
4100            NaN_filler=0,
4101            elevation=None):
    """
    Convert URS C binary format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basename_in_waveheight-z-mux, basename_in_velocity-e-mux and
    basename_in_velocity-n-mux containing relative height,
    x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is for a grid.

    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
    NOTE: minlon is the most easterly boundary.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing)
    It will be the origin of the sww file. This shouldn't be used,
    since all of anuga should be able to handle an arbitrary origin.


    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    which means that latitude is the fastest
    varying dimension (row major order, so to speak)

    In URS C binary the latitudes and longitudes are in ascending order.
    """
    if basename_out is None:
4133        basename_out = basename_in
4134    files_out = urs2nc(basename_in, basename_out)
4135    ferret2sww(basename_out,
4136               minlat=minlat,
4137               maxlat=maxlat,
4138               minlon=minlon,
4139               maxlon=maxlon,
4140               mint=mint,
4141               maxt=maxt,
4142               mean_stage=mean_stage,
4143               origin=origin,
4144               zscale=zscale,
4145               fail_on_NaN=fail_on_NaN,
4146               NaN_filler=NaN_filler,
4147               inverted_bathymetry=True,
4148               verbose=verbose)
4149    #print "files_out",files_out
4150    if remove_nc_files:
4151        for file_out in files_out:
4152            os.remove(file_out)
4153   
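# Example usage (a sketch; 'o' is a hypothetical mux basename):
#
#     urs2sww(basename_in='o', basename_out='o_boundary',
#             minlat=-35.5, maxlat=-35.0, minlon=150.0, maxlon=150.5,
#             verbose=True)
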
4154def urs2nc(basename_in = 'o', basename_out = 'urs'):
    """
    Convert the 3 urs files to 4 nc files.

    The urs file names must be;
    [basename_in]_waveheight-z-mux
    [basename_in]_velocity-e-mux
    [basename_in]_velocity-n-mux

    """
4164   
4165    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4166                basename_in + EAST_VELOCITY_LABEL,
4167                basename_in + NORTH_VELOCITY_LABEL]
4168    files_out = [basename_out+'_ha.nc',
4169                 basename_out+'_ua.nc',
4170                 basename_out+'_va.nc']
4171    quantities = ['HA','UA','VA']
4172
4173    for file_name in files_in:
        if not os.access(file_name, os.F_OK):
4175            msg = 'File %s does not exist or is not accessible' %file_name
4176            raise IOError, msg
4177       
4178    hashed_elevation = None
4179    for file_in, file_out, quantity in map(None, files_in,
4180                                           files_out,
4181                                           quantities):
4182        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
4183                                         file_out,
4184                                         quantity)
4185        #print "lonlatdep", lonlatdep
        if hashed_elevation is None:
4187            elevation_file = basename_out+'_e.nc'
4188            write_elevation_nc(elevation_file,
4189                                lon,
4190                                lat,
4191                                depth)
4192            hashed_elevation = myhash(lonlatdep)
4193        else:
4194            msg = "The elevation information in the mux files is inconsistent"
4195            assert hashed_elevation == myhash(lonlatdep), msg
4196    files_out.append(elevation_file)
4197    return files_out
4198   
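# Example usage (a sketch; 'o' is a hypothetical basename, so the inputs
# are o_waveheight-z-mux, o_velocity-e-mux and o_velocity-n-mux):
#
#     files_out = urs2nc(basename_in='o', basename_out='urs')
#     # -> ['urs_ha.nc', 'urs_ua.nc', 'urs_va.nc', 'urs_e.nc']
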
4199def _binary_c2nc(file_in, file_out, quantity):
4200    """
4201    Reads in a quantity urs file and writes a quantity nc file.
4202    additionally, returns the depth and lat, long info,
4203    so it can be written to a file.
4204    """
4205    columns = 3 # long, lat , depth
4206    mux_file = open(file_in, 'rb')
4207   
4208    # Number of points/stations
4209    (points_num,)= unpack('i',mux_file.read(4))
4210
4211    # nt, int - Number of time steps
4212    (time_step_count,)= unpack('i',mux_file.read(4))
4213
4214    #dt, float - time step, seconds
4215    (time_step,) = unpack('f', mux_file.read(4))
4216   
4217    msg = "Bad data in the mux file."
4218    if points_num < 0:
4219        mux_file.close()
4220        raise ANUGAError, msg
4221    if time_step_count < 0:
4222        mux_file.close()
4223        raise ANUGAError, msg
4224    if time_step < 0:
4225        mux_file.close()
4226        raise ANUGAError, msg
4227   
4228    lonlatdep = p_array.array('f')
4229    lonlatdep.read(mux_file, columns * points_num)
4230    lonlatdep = array(lonlatdep, typecode=Float)   
4231    lonlatdep = reshape(lonlatdep, (points_num, columns))
4232   
4233    lon, lat, depth = lon_lat2grid(lonlatdep)
4234    lon_sorted = list(lon)
4235    lon_sorted.sort()
4236
4237    if not lon == lon_sorted:
4238        msg = "Longitudes in mux file are not in ascending order"
4239        raise IOError, msg
    lat_sorted = list(lat)
    lat_sorted.sort()

    if not lat == lat_sorted:
        msg = "Latitudes in mux file are not in ascending order"
        raise IOError, msg
4245   
4246    nc_file = Write_nc(quantity,
4247                       file_out,
4248                       time_step_count,
4249                       time_step,
4250                       lon,
4251                       lat)
4252
4253    for i in range(time_step_count):
4254        #Read in a time slice  from mux file 
4255        hz_p_array = p_array.array('f')
4256        hz_p_array.read(mux_file, points_num)
4257        hz_p = array(hz_p_array, typecode=Float)
4258        hz_p = reshape(hz_p, (len(lon), len(lat)))
4259        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
4260
4261        #write time slice to nc file
4262        nc_file.store_timestep(hz_p)
4263    mux_file.close()
4264    nc_file.close()
4265
4266    return lonlatdep, lon, lat, depth
4267   
4268
4269def write_elevation_nc(file_out, lon, lat, depth_vector):
4270    """
4271    Write an nc elevation file.
4272    """
4273   
4274    # NetCDF file definition
4275    outfile = NetCDFFile(file_out, 'w')
4276
4277    #Create new file
4278    nc_lon_lat_header(outfile, lon, lat)
4279   
4280    # ELEVATION
4281    zname = 'ELEVATION'
4282    outfile.createVariable(zname, precision, (lat_name, lon_name))
4283    outfile.variables[zname].units='CENTIMETERS'
4284    outfile.variables[zname].missing_value=-1.e+034
4285
4286    outfile.variables[lon_name][:]= ensure_numeric(lon)
4287    outfile.variables[lat_name][:]= ensure_numeric(lat)
4288
4289    depth = reshape(depth_vector, ( len(lat), len(lon)))
4290    outfile.variables[zname][:]= depth
4291   
4292    outfile.close()
4293   
4294def nc_lon_lat_header(outfile, lon, lat):
4295    """
4296    outfile is the netcdf file handle.
4297    lon - a list/array of the longitudes
4298    lat - a list/array of the latitudes
4299    """
4300   
4301    outfile.institution = 'Geoscience Australia'
4302    outfile.description = 'Converted from URS binary C'
4303   
4304    # Longitude
4305    outfile.createDimension(lon_name, len(lon))
4306    outfile.createVariable(lon_name, precision, (lon_name,))
4307    outfile.variables[lon_name].point_spacing='uneven'
4308    outfile.variables[lon_name].units='degrees_east'
4309    outfile.variables[lon_name].assignValue(lon)
4310
4311
4312    # Latitude
4313    outfile.createDimension(lat_name, len(lat))
4314    outfile.createVariable(lat_name, precision, (lat_name,))
4315    outfile.variables[lat_name].point_spacing='uneven'
4316    outfile.variables[lat_name].units='degrees_north'
4317    outfile.variables[lat_name].assignValue(lat)
4318
4319
4320   
4321def lon_lat2grid(long_lat_dep):
    """
    Given a list of points that are assumed to be on a grid,
    return the longs and lats of the grid.
    long_lat_dep is an array where each row is a position.
    The first column is longitudes.
    The second column is latitudes.

    The latitude is the fastest varying dimension - in mux files.
    """
4331    LONG = 0
4332    LAT = 1
4333    QUANTITY = 2
4334
4335    long_lat_dep = ensure_numeric(long_lat_dep, Float)
4336   
4337    num_points = long_lat_dep.shape[0]
4338    this_rows_long = long_lat_dep[0,LONG]
4339
    # Count the length of unique latitudes
    i = 0
    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
        i += 1
    # determine the lats and longs from the grid
4345    lat = long_lat_dep[:i, LAT]       
4346    long = long_lat_dep[::i, LONG]
4347   
4348    lenlong = len(long)
4349    lenlat = len(lat)
4350    #print 'len lat', lat, len(lat)
4351    #print 'len long', long, len(long)
4352         
4353    msg = 'Input data is not gridded'     
4354    assert num_points % lenlat == 0, msg
4355    assert num_points % lenlong == 0, msg
4356         
4357    # Test that data is gridded       
4358    for i in range(lenlong):
4359        msg = 'Data is not gridded.  It must be for this operation'
4360        first = i*lenlat
4361        last = first + lenlat
4362               
4363        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4364        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
4365   
4366   
4367#    print 'range long', min(long), max(long)
4368#    print 'range lat', min(lat), max(lat)
4369#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4370#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4371   
4372   
4373   
4374    msg = 'Out of range latitudes/longitudes'
4375    for l in lat:assert -90 < l < 90 , msg
4376    for l in long:assert -180 < l < 180 , msg
4377
4378    # Changing quantity from lat being the fastest varying dimension to
4379    # long being the fastest varying dimension
4380    # FIXME - make this faster/do this a better way
4381    # use numeric transpose, after reshaping the quantity vector
4382#    quantity = zeros(len(long_lat_dep), Float)
4383    quantity = zeros(num_points, Float)
4384   
4385#    print 'num',num_points
4386    for lat_i, _ in enumerate(lat):
4387        for long_i, _ in enumerate(long):
4388            q_index = lat_i*lenlong+long_i
4389            lld_index = long_i*lenlat+lat_i
4390#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4391            temp = long_lat_dep[lld_index, QUANTITY]
4392            quantity[q_index] = temp
4393           
4394    return long, lat, quantity
4395
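# Worked example (a sketch): a 2x2 grid with latitude varying fastest.
# Each row of the input is (long, lat, quantity):
#
#     lld = [[148.0, -35.0, 1.0], [148.0, -34.0, 2.0],
#            [149.0, -35.0, 3.0], [149.0, -34.0, 4.0]]
#     long, lat, quantity = lon_lat2grid(lld)
#     # long -> [148.0, 149.0], lat -> [-35.0, -34.0]
#     # quantity -> [1.0, 3.0, 2.0, 4.0] (longitude now varies fastest)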
4396    ####  END URS 2 SWW  ###
4397
4398    #### URS UNGRIDDED 2 SWW ###
4399
4400    ### PRODUCING THE POINTS NEEDED FILE ###
4401LL_LAT = -50.0
4402LL_LONG = 80.0
4403GRID_SPACING = 1.0/60.0
4404LAT_AMOUNT = 4800
4405LONG_AMOUNT = 3600
4406def URS_points_needed_to_file(file_name, boundary_polygon, zone,
4407                              ll_lat=LL_LAT, ll_long=LL_LONG,
4408                              grid_spacing=GRID_SPACING, 
4409                              lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT,
4410                              export_csv=False, use_cache=False,
4411                              verbose=False):
    """
    file_name - name of the urs file produced for David.
    boundary_polygon - a list of points that describes a polygon.
                      The last point is assumed to join the first point.
                      This is in UTM (lat long would be better though)

    ll_lat - lower left latitude, in decimal degrees
    ll_long - lower left longitude, in decimal degrees
    grid_spacing - in decimal degrees


    Don't add the file extension.  It will be added.
    """
    geo = URS_points_needed(boundary_polygon, zone, ll_lat, ll_long,
                            grid_spacing, lat_amount, long_amount,
                            use_cache, verbose)
4428    if not file_name[-4:] == ".urs":
4429        file_name += ".urs"
4430    geo.export_points_file(file_name)
4431    if export_csv:
4432        if file_name[-4:] == ".urs":
4433            file_name = file_name[:-4] + ".csv"
4434        geo.export_points_file(file_name)
4435
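# Example usage (a sketch; the polygon, zone and file name are hypothetical):
#
#     polygon = [[400000, 6000000], [420000, 6000000], [420000, 6020000]]
#     URS_points_needed_to_file('boundary_points', polygon, zone=56,
#                               export_csv=True, verbose=True)
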
4436def URS_points_needed(boundary_polygon, zone, ll_lat=LL_LAT,
4437                      ll_long=LL_LONG, grid_spacing=GRID_SPACING, 
4438                      lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT,
4439                      use_cache=False, verbose=False):
    args = (boundary_polygon, zone)
4442    kwargs = {'ll_lat': ll_lat,
4443              'll_long': ll_long,
4444              'grid_spacing': grid_spacing,
4445              'lat_amount': lat_amount,
4446              'long_amount': long_amount} 
4447    if use_cache is True:
        try:
            from anuga.caching import cache
        except ImportError:
            msg = 'Caching was requested, but caching module '+\
                  'could not be imported'
            raise Exception, msg
4454
4455
4456        geo = cache(_URS_points_needed,
4457                  args, kwargs,
4458                  verbose=verbose,
4459                  compression=False)
4460    else:
        #I was getting 'got multiple values for keyword argument' errors
        #geo = apply(_URS_points_needed, args, kwargs)
        geo = _URS_points_needed(boundary_polygon, zone, ll_lat, ll_long,
                                 grid_spacing, lat_amount, long_amount)
4467
    return geo

4469def _URS_points_needed(boundary_polygon,
4470                      zone, ll_lat=LL_LAT,
4471                      ll_long=LL_LONG, grid_spacing=GRID_SPACING, 
4472                      lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT):
    """
    boundary_polygon - a list of points that describes a polygon.
                      The last point is assumed to join the first point.
                      This is in UTM (lat long would be better though)

    ll_lat - lower left latitude, in decimal degrees
    ll_long - lower left longitude, in decimal degrees
    grid_spacing - in decimal degrees

    """
4484   
4485    from sets import ImmutableSet
4486   
    msg = "grid_spacing can not be zero"
    assert grid_spacing != 0, msg
4489    a = boundary_polygon
4490    # List of segments.  Each segment is two points.
4491    segs = [i and [a[i-1], a[i]] or [a[len(a)-1], a[0]] for i in range(len(a))]
4492
4493    # convert the segs to Lat's and longs.
4494   
4495    # Don't assume the zone of the segments is the same as the lower left
4496    # corner of the lat long data!!  They can easily be in different zones
4497   
4498    lat_long_set = ImmutableSet()
4499    for seg in segs:
        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing,
                                        lat_amount, long_amount, zone)
4502        lat_long_set |= ImmutableSet(points_lat_long)
4503    #print "lat_long_set",lat_long_set
4504    geo = Geospatial_data(data_points=list(lat_long_set),
4505                              points_are_lats_longs=True)
4506    return geo
4507   
4508def points_needed(seg, ll_lat, ll_long, grid_spacing, 
4509                  lat_amount, long_amount, zone):
4510    """
4511    return a list of the points, in lats and longs that are needed to
4512    interpolate any point on the segment.
4513    """
4514    from math import sqrt
4515    #print "zone",zone
4516    geo_reference = Geo_reference(zone=zone)
4517    #print "seg",seg
4518    geo = Geospatial_data(seg,geo_reference=geo_reference)
4519    seg_lat_long = geo.get_data_points(as_lat_long=True)
4520    #print "seg_lat_long", seg_lat_long
4521    # 1.415 = 2^0.5, rounded up....
4522    sqrt_2_rounded_up = 1.415
4523    buffer = sqrt_2_rounded_up * grid_spacing
4524   
4525    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
4526    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
4527    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
4528    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer
4529
4530    first_row = (min_long - ll_long)/grid_spacing
4531    # To round up
4532    first_row_long = int(round(first_row + 0.5))
4533    #print "first_row", first_row_long
4534
4535    last_row = (max_long - ll_long)/grid_spacing # round down
4536    last_row_long = int(round(last_row))
    #print "last_row", last_row_long
4538   
4539    first_row = (min_lat - ll_lat)/grid_spacing
4540    # To round up
4541    first_row_lat = int(round(first_row + 0.5))
4542    #print "first_row", first_row_lat
4543
4544    last_row = (max_lat - ll_lat)/grid_spacing # round down
4545    last_row_lat = int(round(last_row))
4546    #print "last_row",last_row_lat
4547
    # Work out the max distance:
    # 111120 m is the distance spanned by 1 degree of latitude, so
    # max_distance = sqrt(2) * 111120 * grid_spacing
    #max_distance = sqrt_2_rounded_up * 111120 * grid_spacing
    max_distance = 157147.4112 * grid_spacing
4552    #print "max_distance", max_distance #2619.12 m for 1 minute
4553    points_lat_long = []
4554    # Create a list of the lat long points to include.
4555    for index_lat in range(first_row_lat, last_row_lat + 1):
4556        for index_long in range(first_row_long, last_row_long + 1):
4557            lat = ll_lat + index_lat*grid_spacing
4558            long = ll_long + index_long*grid_spacing
4559
4560            #filter here to keep good points
4561            if keep_point(lat, long, seg, max_distance):
4562                points_lat_long.append((lat, long)) #must be hashable
4563    #print "points_lat_long", points_lat_long
4564
    # Points too far from the segment have already been filtered out above
4566    return points_lat_long
4567
4568def keep_point(lat, long, seg, max_distance):
4569    """
4570    seg is two points, UTM
4571    """
4572    from math import sqrt
4573    _ , x0, y0 = redfearn(lat, long)
4574    x1 = seg[0][0]
4575    y1 = seg[0][1]
4576    x2 = seg[1][0]
4577    y2 = seg[1][1]
4578
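    # Perpendicular distance from (x0, y0) to the line through
    # (x1, y1) and (x2, y2): |cross(p2 - p1, p1 - p0)| / |p2 - p1|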
4579    x2_1 = x2-x1
4580    y2_1 = y2-y1
4581    d = abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1))/sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1))
4582    if d <= max_distance:
4583        return True
4584    else:
4585        return False
4586   
4587    #### CONVERTING UNGRIDDED URS DATA TO AN SWW FILE ####
4588   
4589WAVEHEIGHT_MUX_LABEL = '_waveheight-z-mux'
4590EAST_VELOCITY_LABEL =  '_velocity-e-mux'
4591NORTH_VELOCITY_LABEL =  '_velocity-n-mux' 
4592def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
4593                      mint=None, maxt=None,
4594                      mean_stage=0,
4595                      origin=None,
4596                      hole_points_UTM=None,
4597                      zscale=1):
    """
    Convert URS C binary format for wave propagation to
    sww format native to abstract_2d_finite_volumes.


    Specify only basename_in and read files of the form
    basename_in_waveheight-z-mux, basename_in_velocity-e-mux and
    basename_in_velocity-n-mux containing relative height,
    x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is assumed to be ungridded.

    min's and max's: If omitted - full extent is used.
    To include a value min and max may equal it.
    Lat and lon are assumed to be in decimal degrees.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing)
    It will be the origin of the sww file. This shouldn't be used,
    since all of anuga should be able to handle an arbitrary origin.
    The mux point info is NOT relative to this origin.


    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    which means that latitude is the fastest
    varying dimension (row major order, so to speak)

    In URS C binary the latitudes and longitudes are in ascending order.
4628
4629    Note, interpolations of the resulting sww file will be different
4630    from results of urs2sww.  This is due to the interpolation
4631    function used, and the different grid structure between urs2sww
4632    and this function.
4633   
4634    Interpolating data that has an underlying gridded source can
4635    easily end up with different values, depending on the underlying
4636    mesh.
4637
4638    consider these 4 points
4639    50  -50
4640
4641    0     0
4642
4643    The grid can be
4644     -
4645    |\|    A
4646     -
4647     or;
4648      -
4649     |/|   B
4650      -
4651      If a point is just below the center of the midpoint, it will have a
4652      +ve value in grid A and a -ve value in grid B.
4653    """ 
4654    from anuga.pmesh.mesh import Mesh, NoTrianglesError
4655
4656    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4657                basename_in + EAST_VELOCITY_LABEL,
4658                basename_in + NORTH_VELOCITY_LABEL]
4659    quantities = ['HA','UA','VA']
4660
    # Instantiate Urs_points for each of the three mux files.
4662    mux = {}
4663    for quantity, file in map(None, quantities, files_in):
4664        mux[quantity] = Urs_points(file)
4665       
4666    # Could check that the depth is the same. (hashing)
4667
4668    # handle to a mux file to do depth stuff
4669    a_mux = mux[quantities[0]]
4670   
4671    # Convert to utm
4672    lat = a_mux.lonlatdep[:,1]
4673    long = a_mux.lonlatdep[:,0]
4674    points_utm, zone = convert_from_latlon_to_utm( \
4675        latitudes=lat, longitudes=long)
4676    #print "points_utm", points_utm
4677    #print "zone", zone
4678
    elevation = a_mux.lonlatdep[:,2] * -1 #Convert depth to elevation
4680   
    # grid (create a mesh from the selected points)
    # This mesh has a problem.  Triangles are stretched over ungridded areas.
    # If these areas could be described as holes in pmesh, that would be great

    # I can't just get the user to select a point in the middle.
    # A boundary is needed around these points.
    # But if the zone of points is obvious enough auto-segment should do
    # a good boundary.
4689    mesh = Mesh()
4690    mesh.add_vertices(points_utm)
4691    mesh.auto_segment(smooth_indents=True, expand_pinch=True)
4692    # To try and avoid alpha shape 'hugging' too much
4693    mesh.auto_segment( mesh.shape.get_alpha()*1.1 )
4694    if hole_points_UTM is not None:
4695        point = ensure_absolute(hole_points_UTM)
4696        mesh.add_hole(point[0], point[1])
4697    try:
4698        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4699    except NoTrianglesError:
4700        # This is a bit of a hack, going in and changing the
4701        # data structure.
4702        mesh.holes = []
4703        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4704    mesh_dic = mesh.Mesh2MeshList()
4705
4706    #mesh.export_mesh_file(basename_in + '_168.tsh')
4707    #import sys; sys.exit()
4708    # These are the times of the mux file
4709    mux_times = []
4710    for i in range(a_mux.time_step_count):
4711        mux_times.append(a_mux.time_step * i) 
4712    mux_times_start_i, mux_times_fin_i = mux2sww_time(mux_times, mint, maxt)
4713    times = mux_times[mux_times_start_i:mux_times_fin_i]
4714   
4715    if mux_times_start_i == mux_times_fin_i:
4716        # Close the mux files
4717        for quantity, file in map(None, quantities, files_in):
4718            mux[quantity].close()
4719        msg="Due to mint and maxt there's no time info in the boundary SWW."
4720        raise Exception, msg
4721       
4722    # If this raise is removed, there are currently no downstream errors.
4723           
4724    points_utm=ensure_numeric(points_utm)
4725    #print "mesh_dic['generatedpointlist']", mesh_dic['generatedpointlist']
4726    #print "points_utm", points_utm
4727    assert allclose(ensure_numeric(mesh_dic['generatedpointlist']),
4728                    ensure_numeric(points_utm))
4729   
4730    volumes = mesh_dic['generatedtrianglelist']
4731   
4732    # write sww intro and grid stuff.   
4733    if basename_out is None:
4734        swwname = basename_in + '.sww'
4735    else:
4736        swwname = basename_out + '.sww'
4737
4738    if verbose: print 'Output to ', swwname
4739    outfile = NetCDFFile(swwname, 'w')
4740    # For a different way of doing this, check out tsh2sww
4741    # work out sww_times and the index range this covers
4742    sww = Write_sww()
4743    sww.store_header(outfile, times, len(volumes), len(points_utm),
4744                     verbose=verbose)
4745    outfile.mean_stage = mean_stage
4746    outfile.zscale = zscale
4747
4748    sww.store_triangulation(outfile, points_utm, volumes,
4749                            elevation, zone,  new_origin=origin,
4750                            verbose=verbose)
4751   
4752    if verbose: print 'Converting quantities'
4753    j = 0
4754    # Read in a time slice from each mux file and write it to the sww file
4755    for ha, ua, va in map(None, mux['HA'], mux['UA'], mux['VA']):
4756        if j >= mux_times_start_i and j < mux_times_fin_i:
4757            stage = zscale*ha + mean_stage
4758            h = stage - elevation
4759            xmomentum = ua*h
4760            ymomentum = -1*va*h # -1 since in mux files south is positive.
4761            sww.store_quantities(outfile, 
4762                                 slice_index=j - mux_times_start_i,
4763                                 verbose=verbose,
4764                                 stage=stage,
4765                                 xmomentum=xmomentum,
4766                                 ymomentum=ymomentum)
4767        j += 1
4768    if verbose: sww.verbose_quantities(outfile)
4769    outfile.close()
4770    #
4771    # Do some conversions while writing the sww file
4772
4773   
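# Example call for the conversion above (a sketch; the 'fancy' mux file
# basename and the time window are assumptions, with urs_ungridded2sww
# taken to be the enclosing function):
#
#     urs_ungridded2sww('fancy', basename_out='fancy_bc',
#                       mint=0.0, maxt=3600.0,
#                       mean_stage=0.0, zscale=1, verbose=True)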
4774def mux2sww_time(mux_times, mint, maxt):
4775    """
4776    """
4777
4778    if mint is None:
4779        mux_times_start_i = 0
4780    else:
4781        mux_times_start_i = searchsorted(mux_times, mint)
4782       
4783    if maxt is None:
4784        mux_times_fin_i = len(mux_times)
4785    else:
4786        maxt += 0.5 # so if you specify a time where there is
4787                    # data that time will be included
4788        mux_times_fin_i = searchsorted(mux_times, maxt)
4789
4790    return mux_times_start_i, mux_times_fin_i
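# Example (a sketch): with mux_times = [0.0, 0.5, 1.0, 1.5, 2.0],
# mux2sww_time(mux_times, 0.6, 1.5) returns (2, 4), selecting
# mux_times[2:4] == [1.0, 1.5]; maxt is padded by 0.5 so that a requested
# end time coinciding with a stored time is included.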
4791
4792
4793class Write_sww:
4794    from anuga.shallow_water.shallow_water_domain import Domain
4795
4796    # FIXME (Ole): Hardwiring the conserved quantities like
4797    # this could be a problem. I would prefer taking them from
4798    # the instantiation of Domain.
4799    #
4800    # (DSG) There is not always a Domain instance when Write_sww is used.
4801    # Check to see if this is the same level of hardwiring as is in
4802    # shallow water domain.
4803   
4804    sww_quantities = Domain.conserved_quantities
4805
4806
4807    RANGE = '_range'
4808    EXTREMA = ':extrema'
4809
4810    def __init__(self):
4811        pass
4812   
4813    def store_header(self,
4814                     outfile,
4815                     times,
4816                     number_of_volumes,
4817                     number_of_points,
4818                     description='Converted from XXX',
4819                     smoothing=True,
4820                     order=1, verbose=False):
4821        """
4822        outfile - the open NetCDF file object to be written to
4823        times - A list of the time slice times OR a start time
4824        Note, if a list is given the info will be made relative.
4825        number_of_volumes - the number of triangles
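
        Example (a sketch of the times handling): times = [10.0, 12.0, 14.0]
        is stored as starttime 10.0 with relative times [0.0, 2.0, 4.0];
        a scalar such as times = 10.0 just sets starttime.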
4826        """
4827   
4828        outfile.institution = 'Geoscience Australia'
4829        outfile.description = description
4830
4831        # For sww compatibility
4832        if smoothing is True:
4833            # Smoothing to be deprecated
4834            outfile.smoothing = 'Yes'
4835            outfile.vertices_are_stored_uniquely = 'False'
4836        else:
4837            # Smoothing to be deprecated
4838            outfile.smoothing = 'No'
4839            outfile.vertices_are_stored_uniquely = 'True'
4840        outfile.order = order
4841
4842        try:
4843            revision_number = get_revision_number()
4844        except:
4845            revision_number = None
4846        # Allow None to be stored as a string               
4847        outfile.revision_number = str(revision_number) 
4848
4849
4850       
4851        # times - A list or array of the time slice times OR a start time
4852        # times = ensure_numeric(times)
4853        # Start time in seconds since the epoch (midnight 1/1/1970)
4854
4855        # This is being used to separate a single number from a list.
4856        # What it actually does is distinguish lists and arrays from scalars.
4857        if type(times) is list or type(times) is ArrayType: 
4858            number_of_times = len(times)
4859            times = ensure_numeric(times) 
4860            if number_of_times == 0:
4861                starttime = 0
4862            else:
4863                starttime = times[0]
4864                times = times - starttime  #Store relative times
4865        else:
4866            number_of_times = 0
4867            starttime = times
4868            #times = ensure_numeric([])
4869        outfile.starttime = starttime
4870        # dimension definitions
4871        outfile.createDimension('number_of_volumes', number_of_volumes)
4872        outfile.createDimension('number_of_vertices', 3)
4873        outfile.createDimension('numbers_in_range', 2)
4874   
4875        if smoothing is True:
4876            outfile.createDimension('number_of_points', number_of_points)
4877       
4878            # FIXME(Ole): This will cause sww files for parallel domains to
4879            # have ghost nodes stored (but not used by triangles).
4880            # To clean this up, we have to change get_vertex_values and
4881            # friends in quantity.py (but I can't be bothered right now)
4882        else:
4883            outfile.createDimension('number_of_points', 3*number_of_volumes)
4884        outfile.createDimension('number_of_timesteps', number_of_times)
4885
4886        # variable definitions
4887        outfile.createVariable('x', precision, ('number_of_points',))
4888        outfile.createVariable('y', precision, ('number_of_points',))
4889        outfile.createVariable('elevation', precision, ('number_of_points',))
4890        q = 'elevation'
4891        outfile.createVariable(q+Write_sww.RANGE, precision,
4892                               ('numbers_in_range',))
4893
4894
4895        # Initialise ranges with small and large sentinels.
4896        # If this was in pure Python we could have used None sensibly
4897        outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min               
4898        outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
4899
4900        # FIXME: Backwards compatibility
4901        outfile.createVariable('z', precision, ('number_of_points',))
4902        #################################
4903
4904        outfile.createVariable('volumes', Int, ('number_of_volumes',
4905                                                'number_of_vertices'))
4906
4907        outfile.createVariable('time', precision,
4908                               ('number_of_timesteps',))
4909       
4910        for q in Write_sww.sww_quantities:
4911            outfile.createVariable(q, precision,
4912                                   ('number_of_timesteps',
4913                                    'number_of_points')) 
4914            outfile.createVariable(q+Write_sww.RANGE, precision,
4915                                   ('numbers_in_range',))
4916
4917            # Initialise ranges with small and large sentinels.
4918            # If this was in pure Python we could have used None sensibly
4919            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min               
4920            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
4921           
4922        if type(times) is list or type(times) is ArrayType: 
4923            outfile.variables['time'][:] = times    #Store time relative
4924           
4925        if verbose and number_of_times > 0:
4926            print '------------------------------------------------'
4927            print 'Statistics:'
4928            print '    t in [%f, %f], len(t) == %d'\
4929                  %(min(times.flat), max(times.flat), len(times.flat))
4930
4931       
4932    def store_triangulation(self,
4933                            outfile,
4934                            points_utm,
4935                            volumes,
4936                            elevation, zone=None, new_origin=None, 
4937                            points_georeference=None, verbose=False):
4938        """
4939       
4940        new_origin - a georeference that the points can be set to. (Maybe
4941        do this before calling this function.)
4942       
4943        points_utm - currently a list or array of the points in UTM.
4944        points_georeference - the georeference of the points_utm
4945       
4946        How about passing new_origin and current_origin.
4947        If you get both, do a conversion from the old to the new.
4948       
4949        If you only get new_origin, the points are absolute,
4950        convert to relative
4951       
4952        if you only get the current_origin the points are relative, store
4953        as relative.
4954       
4955        if you get no georefs create a new georef based on the minimums of
4956        points_utm.  (Another option would be to default to absolute)
4957       
4958        Yes, and this is done in another part of the code.
4959        Probably geospatial.
4960       
4961        If you don't supply either geo_ref, then supply a zone; otherwise
4962        the default zone will be used.
4963       
4964       
4965        Precondition: store_header has been called.
4968        """
4969       
4970        number_of_points = len(points_utm)   
4971        volumes = array(volumes) 
4972        points_utm = array(points_utm)
4973
4974        # given the two geo_refs and the points, do the stuff
4975        # described in the method header
4976        # if this is needed else where, pull out as a function
4977        points_georeference = ensure_geo_reference(points_georeference)
4978        new_origin = ensure_geo_reference(new_origin)
4979        if new_origin is None and points_georeference is not None:
4980            points = points_utm
4981            geo_ref = points_georeference
4982        else:
4983            if new_origin is None:
4984                new_origin = Geo_reference(zone,min(points_utm[:,0]),
4985                                           min(points_utm[:,1]))
4986            points = new_origin.change_points_geo_ref(points_utm,
4987                                                      points_georeference)
4988            geo_ref = new_origin
4989
4990        # At this stage I need a georef and points
4991        # the points are relative to the georef
4992        geo_ref.write_NetCDF(outfile)
4993   
4994        # This will put the geo ref in the middle
4995        #geo_ref=Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
4996       
4997        x =  points[:,0]
4998        y =  points[:,1]
4999        z = outfile.variables['z'][:]
5000   
5001        if verbose:
5002            print '------------------------------------------------'
5003            print 'More Statistics:'
5004            print '  Extent:'
5005            print '    x in [%f, %f], len(x) == %d'\
5006                  %(min(x), max(x),
5007                    len(x))
5008            print '    y in [%f, %f], len(y) == %d'\
5009                  %(min(y), max(y),
5010                    len(y))
5011            print '    z in [%f, %f], len(z) == %d'\
5012                  %(min(elevation), max(elevation),
5013                    len(elevation))
5014            print 'geo_ref: ',geo_ref
5015            print '------------------------------------------------'
5016           
5017        #z = resize(bath_grid,outfile.variables['z'][:].shape)
5018        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
5019        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
5020        outfile.variables['z'][:] = elevation
5021        outfile.variables['elevation'][:] = elevation  #FIXME HACK
5022        outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
5023
5024        q = 'elevation'
5025        # This updates the _range values
5026        outfile.variables[q+Write_sww.RANGE][0] = min(elevation)
5027        outfile.variables[q+Write_sww.RANGE][1] = max(elevation)
5028
5029
5030    def store_quantities(self, outfile, precision=Float,
5031                         slice_index=None, time=None,
5032                         verbose=False, **quant):
5033        """
5034        Write the quantity info.
5035
5036        **quant is extra keyword arguments passed in. These must be
5037          the sww quantities, currently; stage, xmomentum, ymomentum.
5038       
5039        if the time array has already been built, use the slice_index
5040        to specify the index.
5041       
5042        Otherwise, use time to increase the time dimension
5043
5044        Maybe make this general, but the viewer assumes these quantities,
5045        so maybe we don't want it general - unless the viewer is general
5046       
5047        Precondition: store_triangulation and store_header have been called.
5050        """
5051
5052        if time is not None:
5053            file_time = outfile.variables['time']
5054            slice_index = len(file_time)
5055            file_time[slice_index] = time   
5056
5057        # write the conserved quantities from Domain.
5058        # Typically stage,  xmomentum, ymomentum
5059        # other quantities will be ignored, silently.
5060        for q in Write_sww.sww_quantities:
5061            if not quant.has_key(q):
5062                msg = 'SWW file can not write quantity %s' %q
5063                raise NewQuantity, msg
5064            else:
5065                q_values = quant[q]
5066                outfile.variables[q][slice_index] = q_values.astype(precision)
5067
5068                # This updates the _range values
5069                q_range = outfile.variables[q+Write_sww.RANGE][:]
5070                q_values_min = min(q_values)
5071                if q_values_min < q_range[0]:
5072                    outfile.variables[q+Write_sww.RANGE][0] = q_values_min
5073                q_values_max = max(q_values)
5074                if q_values_max > q_range[1]:
5075                    outfile.variables[q+Write_sww.RANGE][1] = q_values_max
5076
5077    def verbose_quantities(self, outfile):
5078        print '------------------------------------------------'
5079        print 'More Statistics:'
5080        for q in Write_sww.sww_quantities:
5081            print '%s in [%f, %f]' %(q,
5082                                       outfile.variables[q+Write_sww.RANGE][0],
5083                                       outfile.variables[q+Write_sww.RANGE][1])
5084        print '------------------------------------------------'
5085
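# Typical use of Write_sww (a sketch; outfile is an open NetCDFFile and the
# times/points/volumes/quantity arrays are assumed to exist):
#
#     sww = Write_sww()
#     sww.store_header(outfile, times, len(volumes), len(points_utm))
#     sww.store_triangulation(outfile, points_utm, volumes, elevation, zone)
#     for j, (stage, xmom, ymom) in enumerate(zip(stages, xmoms, ymoms)):
#         sww.store_quantities(outfile, slice_index=j, stage=stage,
#                              xmomentum=xmom, ymomentum=ymom)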
5086
5087       
5088def obsolete_write_sww_time_slices(outfile, has, uas, vas, elevation,
5089                         mean_stage=0, zscale=1,
5090                         verbose=False):   
5091    #Time stepping
5092    stage = outfile.variables['stage']
5093    xmomentum = outfile.variables['xmomentum']
5094    ymomentum = outfile.variables['ymomentum']
5095
5096    n = len(has)
5097    j=0
5098    for ha, ua, va in map(None, has, uas, vas):
5099        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
5100        w = zscale*ha + mean_stage
5101        stage[j] = w
5102        h = w - elevation
5103        xmomentum[j] = ua*h
5104        ymomentum[j] = -1*va*h  #  -1 since in mux files south is positive.
5105        j += 1
5106   
5107def urs2txt(basename_in, location_index=None):
5108    """
5109    Not finished or tested
5110    """
5111   
5112    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
5113                basename_in + EAST_VELOCITY_LABEL,
5114                basename_in + NORTH_VELOCITY_LABEL]
5115    quantities = ['HA','UA','VA']
5116
5117    d = ","
5118   
5119    # Instantiate Urs_points for each of the three mux files.
5120    mux = {}
5121    for quantity, file in map(None, quantities, files_in):
5122        mux[quantity] = Urs_points(file)
5123       
5124    # Could check that the depth is the same. (hashing)
5125
5126    # handle to a mux file to do depth stuff
5127    a_mux = mux[quantities[0]]
5128   
5129    # Convert to utm
5130    latitudes = a_mux.lonlatdep[:,1]
5131    longitudes = a_mux.lonlatdep[:,0]
5132    points_utm, zone = convert_from_latlon_to_utm( \
5133        latitudes=latitudes, longitudes=longitudes)
5134    #print "points_utm", points_utm
5135    #print "zone", zone
5136    depths = a_mux.lonlatdep[:,2]  # depth in metres (positive down)
5137   
5138    fid = open(basename_in + '.txt', 'w')
5139
5140    fid.write("zone: " + str(zone) + "\n")
5141
5142    if location_index is not None:
5143        #Title
5144        li = location_index
5145        fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
5146                  'Northing' + "\n")
5147        fid.write(str(li) +d+ str(latitudes[li])+d+ \
5148              str(longitudes[li]) +d+ str(points_utm[li][0]) +d+ \
5149              str(points_utm[li][1]) + "\n")
5150
5151    # the non-time dependent stuff
5152    #Title
5153    fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
5154                  'Northing' +d+ 'depth m' + "\n")
5155    i = 0
5156    for depth, point_utm, lat, long in map(None, depths,
5157                                               points_utm, latitudes,
5158                                               longitudes):
5159       
5160        fid.write(str(i) +d+ str(lat)+d+ str(long) +d+ str(point_utm[0]) +d+ \
5161                  str(point_utm[1]) +d+ str(depth) + "\n")
5162        i +=1
5163    #Time dependent
5164    if location_index is not None:
5165        time_step = a_mux.time_step
5166        i = 0
5167        #Title
5168        fid.write('time' +d+ 'HA depth m'+d+ \
5169                 'UA momentum East x m/sec' +d+ 'VA momentum North y m/sec' \
5170                      + "\n")
5171        for HA, UA, VA in map(None, mux['HA'], mux['UA'], mux['VA']):
5172            fid.write(str(i*time_step) +d+ str(HA[location_index])+d+ \
5173                      str(UA[location_index]) +d+ str(VA[location_index]) \
5174                      + "\n")
5175           
5176            i +=1
5177   
5178class Urs_points:
5179    """
5180    Read the info in URS mux files.
5181
5182    For the quantities, here's the correlation between the file names and
5183    what they mean:
5184    z-mux is height above sea level, m
5185    e-mux is velocity in Eastern direction, m/s
5186    n-mux is velocity in Northern direction, m/s
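
    A typical use (a sketch; 'fancy-z-mux' is a hypothetical file name):

        mux = Urs_points('fancy-z-mux')
        for hz in mux:    # one array of station values per time slice
            print hz      # hz has length mux.points_num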
5187    """
5188    def __init__(self,urs_file):
5189        self.iterated = False
5190        columns = 3 # long, lat , depth
5191        mux_file = open(urs_file, 'rb')
5192       
5193        # Number of points/stations
5194        (self.points_num,)= unpack('i',mux_file.read(4))
5195       
5196        # nt, int - Number of time steps
5197        (self.time_step_count,)= unpack('i',mux_file.read(4))
5198        #print "self.time_step_count", self.time_step_count
5199        #dt, float - time step, seconds
5200        (self.time_step,) = unpack('f', mux_file.read(4))
5201        #print "self.time_step", self.time_step
5202        msg = "Bad data in the urs file."
5203        if self.points_num < 0:
5204            mux_file.close()
5205            raise ANUGAError, msg
5206        if self.time_step_count < 0:
5207            mux_file.close()
5208            raise ANUGAError, msg
5209        if self.time_step < 0:
5210            mux_file.close()
5211            raise ANUGAError, msg
5212
5213        # The depth is in metres; it is the distance from the ocean
5214        # surface down to the sea bottom.
5215        lonlatdep = p_array.array('f')
5216        lonlatdep.read(mux_file, columns * self.points_num)
5217        lonlatdep = array(lonlatdep, typecode=Float)   
5218        lonlatdep = reshape(lonlatdep, (self.points_num, columns))
5219        #print 'lonlatdep',lonlatdep
5220        self.lonlatdep = lonlatdep
5221       
5222        self.mux_file = mux_file
5223        # check this array
5224
5225    def __iter__(self):
5226        """
5227        Iterate over the quantity data with respect to time.
5228
5229        Note: You can only iterate once over an object.
5230       
5231        Returns quantity information for each time slice.
5232        """
5233        msg = "You can only iterate once over a urs file."
5234        assert not self.iterated, msg
5235        self.iter_time_step = 0
5236        self.iterated = True
5237        return self
5238   
5239    def next(self):
5240        if self.time_step_count == self.iter_time_step:
5241            self.close()
5242            raise StopIteration
5243        #Read in a time slice  from mux file 
5244        hz_p_array = p_array.array('f')
5245        hz_p_array.read(self.mux_file, self.points_num)
5246        hz_p = array(hz_p_array, typecode=Float)
5247        self.iter_time_step += 1
5248       
5249        return hz_p
5250
5251    def close(self):
5252        self.mux_file.close()
5253       
5254    #### END URS UNGRIDDED 2 SWW ###
5255
5256       
5257def start_screen_catcher(dir_name, myid='', numprocs='', extra_info='',
5258                         print_to_screen=False, verbose=False):
5259    """
5260    Used to store screen output and errors to file. If run on multiple
5261    processes, each processor will have its own output and error file.
5262   
5263    extra_info - is used as a string that can identify outputs with another
5264    string eg. '_other'
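
    Example (a sketch; the directory and process numbers are assumptions):

        start_screen_catcher('output'+sep, myid=0, numprocs=4)
        print 'hello'    # appended to output/screen_output_0_4.txt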
5265    """
5266    import sys
5268    if access(dir_name,W_OK) == 0:
5269        if verbose: print 'Make directory %s' %dir_name
5270        if verbose: print "myid", myid
5271        mkdir (dir_name,0777)
5272    if myid != '':
5273        myid = '_'+str(myid)
5274    if numprocs != '':
5275        numprocs = '_'+str(numprocs)
5276    if extra_info != '':
5277        extra_info = '_'+str(extra_info)
5278    screen_output_name = dir_name + "screen_output%s%s%s.txt" %(myid,
5279                                                                numprocs,
5280                                                                extra_info)
5281    screen_error_name = dir_name + "screen_error%s%s%s.txt" %(myid,
5282                                                              numprocs,
5283                                                              extra_info)
5284    print screen_output_name
5285    #used to catch screen output to file
5286    sys.stdout = Screen_Catcher(screen_output_name)
5287    sys.stderr = Screen_Catcher(screen_error_name)
5288
5289class Screen_Catcher:
5290    """this simply catches the screen output and stores it to file defined by
5291    start_screen_catcher (above)
5292    """
5293   
5294    def __init__(self, filename):
5295        self.filename = filename
5296 
5297        if exists(self.filename):
5298            print 'Old existing file "%s" has been deleted' %(self.filename)
5299            remove(self.filename)
5300
5301    def write(self, stuff):
5302        fid = open(self.filename, 'a')
5303        fid.write(stuff)
5304#        if print_to_screen: print stuff
5305
5306def copy_code_files(dir_name, filename1, filename2):
5307    """Copies "filename1" and "filename2" to "dir_name". Very useful for
5308    information management
5309    filename1 and filename2 are both absolute pathnames   
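
    Example (a sketch; the paths are hypothetical):
        copy_code_files('results', '/home/user/run_model.py',
                        '/home/user/project.py')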
5310    """
5311
5312    if access(dir_name,F_OK) == 0:
5313        print 'Make directory %s' %dir_name
5314        mkdir (dir_name,0777)
5315    shutil.copy(filename1, dir_name + sep + basename(filename1))
5316    shutil.copy(filename2, dir_name + sep + basename(filename2))
5317#    copy (__file__, project.output_run_time_dir + basename(__file__))
5318    print 'Files %s and %s copied' %(filename1, filename2)
5319
5320def get_data_from_file(filename,separator_value = ','):
5321    """
5322    Read in data information from a delimited text file.
5323   
5324    Returns:
5325        header_fields, a list of the fields in the first line, split
5326        on the 'separator_value'
5327       
5328        data, an array (M lines x N data columns) of the file contents,
5329        excluding the header
5330       
5331    NOTE: won't deal with columns of different lengths and there must be
5332    no blank lines at the end.
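
    Example (a sketch) for a file containing:
        time,stage
        0.0,1.5
        10.0,1.8
    header_fields == ['time', 'stage\n'] (the newline is not stripped)
    and data == [[0.0, 1.5], [10.0, 1.8]].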
5333    """
5334   
5335    fid = open(filename)
5336    lines = fid.readlines()
5337   
5338    fid.close()
5339   
5340    header_line = lines[0]
5341    header_fields = header_line.split(separator_value)
5342
5343    #array to store data, number in there is to allow float...
5344    #i'm sure there is a better way!
5345    data=array([],typecode=Float)
5346    data=resize(data,((len(lines)-1),len(header_fields)))
5347#    print 'number of fields',range(len(header_fields))
5348#    print 'number of lines',len(lines), shape(data)
5349#    print'data',data[1,1],header_line
5350
5351    array_number = 0
5352    line_number = 1
5353    while line_number < (len(lines)):
5354        # line_number starts at 1, so this skips the header line;
5355        # array_number is the corresponding row index into data
5356        fields = lines[line_number].split(separator_value)
5357        for i in range(len(header_fields)): 
5358            #assign to array
5359            data[array_number,i] = float(fields[i])
5360           
5361        line_number = line_number +1
5362        array_number = array_number +1
5363       
5364    return header_fields, data
5365
5366def store_parameters(verbose=False,**kwargs):
5367    """
5368    Store "kwargs" into a temp csv file, if "completed" is a kwargs csv file is
5369    kwargs[file_name] else it is kwargs[output_dir] + details_temp.csv
5370   
5371    Must have a file_name keyword arg, this is what is writing to.
5372    might be a better way to do this using CSV module Writer and writeDict
5373   
5374    writes file to "output_dir" unless "completed" is in kwargs, then
5375    it writes to "file_name" kwargs
5376
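    Example (a sketch; the keyword values are assumptions):
        store_parameters(output_dir='results/', run='a', dt=0.1)
        # appends '0.1,results/,a' under the header 'dt,output_dir,run'
        # to results/detail_temp.csv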
5377    """
5378    import types
5379#    import os
5380   
5381    # Check that kwargs is a dictionary
5382    if type(kwargs) != types.DictType:
5383        raise TypeError
5384   
5385    # is 'completed' in kwargs?
5386    completed = kwargs.has_key('completed')
5391 
5392    #get the file name and assert that a file_name exists
5393    if completed:
5394        try:
5395            file = str(kwargs['file_name'])
5396        except:
5397            raise Exception, 'kwargs must have file_name'
5398    else:
5399        #write temp file in output directory
5400        try:
5401            file = str(kwargs['output_dir'])+'detail_temp.csv'
5402        except:
5403            raise Exception, 'kwargs must have output_dir'
5404       
5405    #extracts the header info and the new line info
5406    line=''
5407    header=''
5408    count=0
5409    keys = kwargs.keys()
5410    keys.sort()
5411   
5412    #used the sorted keys to create the header and line data
5413    for k in keys:
5414#        print "%s = %s" %(k, kwargs[k])
5415        header = header+str(k)
5416        line = line+str(kwargs[k])
5417        count+=1
5418        if count <len(kwargs):
5419            header = header+','
5420            line = line+','
5421    header+='\n'
5422    line+='\n'
5423
5424    # checks the header info, if the same, then write, if not create a new file
5425    #try to open!
5426    try:
5427        fid = open(file,"r")
5428        file_header=fid.readline()
5429        fid.close()
5430        if verbose: print 'read file header %s' %file_header
5431       
5432    except:
5433        msg = 'trying to create new file %s' %file
5434        if verbose: print msg
5435        #tries to open file, maybe directory is bad
5436        try:
5437            fid = open(file,"w")
5438            fid.write(header)
5439            fid.close()
5440            file_header=header
5441        except:
5442            msg = 'cannot create new file %s' %file
5443            raise Exception, msg
5444           
5445    #if header is same or this is a new file
5446    if file_header==str(header):
5447        fid=open(file,"a")
5448        #write new line
5449        fid.write(line)
5450        fid.close()
5451    else:
5452        #backup plan,
5453        # if header is different and has completed will append info to
5454        #end of details_temp.cvs file in output directory
5455        file = str(kwargs['output_dir'])+'detail_temp.csv'
5456        fid=open(file,"a")
5457        fid.write(header)
5458        fid.write(line)
5459        fid.close()
5460        if verbose: print 'file',file_header.strip('\n')
5461        if verbose: print 'head',header.strip('\n')
5462        if file_header.strip('\n')==str(header): print 'they equal'
5463        msg = 'WARNING: File header does not match input info; the input '\
              'variables have changed. Suggest changing the file name.'
5464        print msg
5465
5466
5467
5468
5469
5470def get_maximum_inundation_elevation(filename,
5471                                     polygon=None,
5472                                     time_interval=None,
5473                                     verbose=False):
5474   
5475    """Return highest elevation where depth > 0
5476   
5477    Usage:
5478    max_runup = get_maximum_inundation_elevation(filename,
5479                                                 polygon=None,
5480                                                 time_interval=None,
5481                                                 verbose=False)
5482
5483    filename is a NetCDF sww file containing ANUGA model output.   
5484    Optional arguments polygon and time_interval restrict the maximum
5485    runup calculation
5486    to points that lie within the specified polygon and time interval.
5487
5488    If no inundation is found within polygon and time_interval the return value
5489    is None signifying "No Runup" or "Everything is dry".
5490
5491    See general function get_maximum_inundation_data for details.
5492   
5493    """
5494   
5495    runup, _ = get_maximum_inundation_data(filename,
5496                                           polygon=polygon,
5497                                           time_interval=time_interval,
5498                                           verbose=verbose)
5499    return runup
5500
5501
5502
5503
5504def get_maximum_inundation_location(filename,
5505                                    polygon=None,
5506                                    time_interval=None,
5507                                    verbose=False):
5508    """Return location of highest elevation where h > 0
5509   
5510   
5511    Usage:
5512    max_runup_location = get_maximum_inundation_location(filename,
5513                                                         polygon=None,
5514                                                         time_interval=None,
5515                                                         verbose=False)
5516
5517    filename is a NetCDF sww file containing ANUGA model output.
5518    Optional arguments polygon and time_interval restrict the maximum
5519    runup calculation
5520    to points that lie within the specified polygon and time interval.
5521
5522    If no inundation is found within polygon and time_interval the return value
5523    is None signifying "No Runup" or "Everything is dry".
5524
5525    See general function get_maximum_inundation_data for details.
5526    """
5527   
5528    _, max_loc = get_maximum_inundation_data(filename,
5529                                             polygon=polygon,
5530                                             time_interval=time_interval,
5531                                             verbose=verbose)
5532    return max_loc
5533   
5534
5535
5536def get_maximum_inundation_data(filename, polygon=None, time_interval=None,
5537                                use_centroid_values=False,
5538                                verbose=False):
5539    """Compute maximum run up height from sww file.
5540
5541
5542    Usage:
5543    runup, location = get_maximum_inundation_data(filename,
5544                                                  polygon=None,
5545                                                  time_interval=None,
5546                                                  verbose=False)
5547   
5548
5549    Algorithm is as in get_maximum_inundation_elevation from
5550    shallow_water_domain
5551    except that this function works with the sww file and computes the maximal
5552    runup height over multiple timesteps.
5553   
5554    Optional arguments polygon and time_interval restrict the
5555    maximum runup calculation
5556    to points that lie within the specified polygon and time interval.
5557    Polygon is
5558    assumed to be in (absolute) UTM coordinates in the same zone as domain.
5559
5560    If no inundation is found within polygon and time_interval the return value
5561    is None signifying "No Runup" or "Everything is dry".
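
    Example (a sketch; the file name, polygon and interval are assumptions):
        runup, location = get_maximum_inundation_data('model_output.sww',
                              polygon=[[0,0], [100,0], [100,100], [0,100]],
                              time_interval=[0, 3600])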
5562    """
5563
5564    # We are using nodal values here as that is what is stored in sww files.
5565
5566    # Water depth below which it is considered to be 0 in the model
5567    # FIXME (Ole): Allow this to be specified as a keyword argument as well
5568
5569    from anuga.utilities.polygon import inside_polygon   
5570    from anuga.config import minimum_allowed_height
5571    from Scientific.IO.NetCDF import NetCDFFile
5572
5573    dir, base = os.path.split(filename)
5574           
5575    iterate_over = get_all_swwfiles(dir,base)
5576   
5577    # Read sww file
5578    if verbose: 
5579        print 'Reading from %s' %filename
5580        # FIXME: Use general swwstats (when done)
5581   
5582    maximal_runup = None
5583    maximal_runup_location = None
5584   
5585    for i, swwfile in enumerate(iterate_over):
5586       
5587        # Read sww file
5588        filename = join(dir,swwfile+'.sww')
5589       
5590        if verbose: 
5591            print 'Reading from %s' %filename
5592            # FIXME: Use general swwstats (when done)
5593               
5594        fid = NetCDFFile(filename)
5595   
5596        # Get geo_reference
5597        # sww files don't have to have a geo_ref
5598        try:
5599            geo_reference = Geo_reference(NetCDFObject=fid)
5600        except AttributeError, e:
5601            geo_reference = Geo_reference() # Default georef object
5602           
5603        xllcorner = geo_reference.get_xllcorner()
5604        yllcorner = geo_reference.get_yllcorner()
5605        zone = geo_reference.get_zone()
5606       
5607        # Get extent
5608        volumes = fid.variables['volumes'][:]   
5609        x = fid.variables['x'][:] + xllcorner
5610        y = fid.variables['y'][:] + yllcorner
5611   
5612   
5613        # Get the relevant quantities
5614        elevation = fid.variables['elevation'][:] 
5615        stage = fid.variables['stage'][:]
5616   
5617   
5618        # Here's where one could convert nodal information to centroid
5619        # information
5620        # but is probably something we need to write in C.
5621        # Here's a Python thought which is NOT finished!!!
5622        if use_centroid_values is True:
5623            x = get_centroid_values(x, volumes)
5624            y = get_centroid_values(y, volumes)   
5625            elevation = get_centroid_values(elevation, volumes)   
5626   
5627   
5628        # Spatial restriction
5629        if polygon is not None:
5630            msg = 'polygon must be a sequence of points.'
5631            assert len(polygon[0]) == 2, msg
5632            # FIXME (Ole): Make a generic polygon input check in polygon.py
5633            # and call it here
5634           
5635            points = concatenate((x[:,NewAxis], y[:,NewAxis]), axis=1)
5636   
5637            point_indices = inside_polygon(points, polygon)
5638   
5639            # Restrict quantities to polygon
5640            elevation = take(elevation, point_indices)
5641            stage = take(stage, point_indices, axis=1)
5642   
5643            # Get info for location of maximal runup
5644            points_in_polygon = take(points, point_indices)
5645            x = points_in_polygon[:,0]
5646            y = points_in_polygon[:,1]       
5647        else:
5648            # Take all points
5649            point_indices = arange(len(x))
5650           
5651   
5652        # Temporal restriction
5653        time = fid.variables['time'][:]
5654        all_timeindices = arange(len(time))       
5655        if time_interval is not None:
5656           
5657            msg = 'time_interval must be a sequence of length 2.'
5658            assert len(time_interval) == 2, msg
5659            msg = 'time_interval %s must not be decreasing.' %(time_interval)
5660            assert time_interval[1] >= time_interval[0], msg
5661           
5662            msg = 'Specified time interval [%.8f:%.8f]' %tuple(time_interval)
5663            msg += ' does not match model time interval: [%.8f, %.8f]\n'\
5664                   %(time[0], time[-1])
5665            if time_interval[1] < time[0]: raise ValueError(msg)
5666            if time_interval[0] > time[-1]: raise ValueError(msg)
5667   
5668            # Take time indices corresponding to interval (& is bitwise AND)
5669            timesteps = compress((time_interval[0] <= time) & (time <= time_interval[1]),
5670                                 all_timeindices)
5671   
5672   
5673            msg = 'time_interval %s did not include any model timesteps.' %(time_interval)       
5674            assert not alltrue(timesteps == 0), msg
5675   
5676   
5677        else:
5678            # Take them all
5679            timesteps = all_timeindices
5680       
5681   
5682        fid.close()
5683   
5684        # Compute maximal runup for each timestep
5685        #maximal_runup = None
5686        #maximal_runup_location = None
5687        #maximal_runups = [None]
5688        #maximal_runup_locations = [None]
5689       
5690        for i in timesteps:
5691            if use_centroid_values is True:
5692                stage_i = get_centroid_values(stage[i,:], volumes)   
5693            else:
5694                stage_i = stage[i,:]
5695               
5696            depth = stage_i  - elevation
5697       
5698            # Get wet nodes i.e. nodes with depth>0 within given region and timesteps
5699            wet_nodes = compress(depth > minimum_allowed_height, arange(len(depth)))
5700   
5701            if alltrue(wet_nodes == 0):
5702                runup = None
5703            else:   
5704                # Find maximum elevation among wet nodes
5705                wet_elevation = take(elevation, wet_nodes)
5706   
5707                runup_index = argmax(wet_elevation)
5708                runup = max(wet_elevation)
5709                assert wet_elevation[runup_index] == runup # Must always be True
5710            if runup > maximal_runup:
5711                maximal_runup = runup      # This works even if maximal_runup is None
5712                #print "NEW RUNUP",runup
5713   
5714                # Record location
5715                wet_x = take(x, wet_nodes)
5716                wet_y = take(y, wet_nodes)           
5717                maximal_runup_location = [wet_x[runup_index], wet_y[runup_index]]
5718   
5719    #print 'maximal_runup, maximal_runup_location',maximal_runup, maximal_runup_location
5720    return maximal_runup, maximal_runup_location
5721
5722def get_all_swwfiles(look_in_dir='',base_name='',verbose=False):
5723    '''
5724    Finds all the sww files in "look_in_dir" whose names contain "base_name".
5725    will accept base_name with or without the extension ".sww"
5726   
5727    Returns: a list of strings
5728       
5729    Usage:     iterate_over = get_all_swwfiles(dir, name)
5730    then
5731               for swwfile in iterate_over:
5732                   do stuff
5733                   
5734    Check "export_grids" and "get_maximum_inundation_data" for examples
5735    '''
5736   
5737    #plus tests the extension
5738    name, extension = os.path.splitext(base_name)
5739
5740    if extension != '' and extension != '.sww':
5741        msg = 'file %s (extension %s) must be a NetCDF sww file!' %(base_name, extension)
5742        raise IOError, msg
5743
5744    if look_in_dir == "":
5745        look_in_dir = "." # Unix compatibility
5746   
5747    dir_ls = os.listdir(look_in_dir)
5748    #print 'dir_ls',dir_ls, base
5749    iterate_over = [x[:-4] for x in dir_ls if name in x and x[-4:] == '.sww']
5750    if len(iterate_over) == 0:
5751        msg = 'No files of the base name %s'\
5752              %(name)
5753        raise IOError, msg
5754    if verbose: print 'iterate over %s' %(iterate_over)
5755
5756    return iterate_over
5757
5758def get_all_files_with_extension(look_in_dir='',base_name='',extension='.sww',verbose=False):
5759    '''
5760    Finds all the files with the given extension in "look_in_dir" whose names contain "base_name".
5761   
5762   
5763    Returns: a list of strings
5764       
5765    Usage:     iterate_over = get_all_files_with_extension(dir, name, '.sww')
5766    then
5767               for swwfile in iterate_over:
5768                   do stuff
5769                   
5770    Check "export_grids" and "get_maximum_inundation_data" for examples
5771    '''
5772   
5773    #plus tests the extension
5774    name, ext = os.path.splitext(base_name)
5775#    print 'look_in_dir',look_in_dir
5776
5777    if ext != '' and ext != extension:
5778        msg = 'base_name %s must be a file with %s extension!' %(base_name, extension)
5779        raise IOError, msg
5780
5781    if look_in_dir == "":
5782        look_in_dir = "." # Unix compatibility
5783#    print 'look_in_dir',look_in_dir, getcwd()
5784    dir_ls = os.listdir(look_in_dir)
5785    #print 'dir_ls',dir_ls, base_name
5786    iterate_over = [x[:-len(extension)] for x in dir_ls if name in x and x[-len(extension):] == extension]
5787    if len(iterate_over) == 0:
5788        msg = 'No files of the base name %s in %s'\
5789              %(name, look_in_dir)
5790        raise IOError, msg
5791    if verbose: print 'iterate over %s' %(iterate_over)
5792
5793    return iterate_over
5794
5795def get_all_directories_with_name(look_in_dir='',base_name='',verbose=False):
5796    '''
5797    Finds all the directories in "look_in_dir" whose names contain "base_name".
5798   
5799   
5800    Returns: a list of strings
5801       
5802    Usage:     iterate_over = get_all_directories_with_name(dir, name)
5803    then
5804               for swwfile in iterate_over:
5805                   do stuff
5806                   
5807    Check "export_grids" and "get_maximum_inundation_data" for examples
5808    '''
5809   
5810    #plus tests the extension
5811
5812    if look_in_dir == "":
5813        look_in_dir = "." # Unix compatibility
5814#    print 'look_in_dir',look_in_dir
5815    dir_ls = os.listdir(look_in_dir)
5816#    print 'dir_ls',dir_ls
5817    iterate_over = [x for x in dir_ls if base_name in x]
5818    if len(iterate_over) == 0:
5819        msg = 'No directories with the base name %s'\
5820              %(base_name)
5821        raise IOError, msg
5822    if verbose: print 'iterate over %s' %(iterate_over)
5823
5824    return iterate_over
5825
5826
5827#-------------------------------------------------------------
5828if __name__ == "__main__":
5829    #setting umask from config to force permissions for all files and directories
5830    # created to the same. (it was noticed the "mpirun" doesn't honour the umask
5831    # set in your .bashrc etc file)
5832    from config import umask
5833    import os 
5834    os.umask(umask)
5835