source: anuga_core/source/anuga/shallow_water/data_manager.py @ 5224

Last change on this file since 5224 was 5189, checked in by duncan, 17 years ago

adding hacky function, points2polygon

File size: 191.3 KB
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.csv: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using abstract_2d_finite_volumes
48
49"""

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass
class NewQuantity(exceptions.Exception): pass



import csv
import os, sys
import shutil
from struct import unpack
import array as p_array
from os import sep, path, remove, mkdir, access, F_OK, W_OK, getcwd


from Numeric import concatenate, array, Float, Int, Int32, resize, \
     sometrue, searchsorted, zeros, allclose, around, reshape, \
     transpose, sort, NewAxis, ArrayType, compress, take, arange, \
     argmax, alltrue, shape, Float32, size

import string

from Scientific.IO.NetCDF import NetCDFFile
from os.path import exists, basename, join


from anuga.coordinate_transforms.redfearn import redfearn, \
     convert_from_latlon_to_utm
from anuga.coordinate_transforms.geo_reference import Geo_reference, \
     write_NetCDF_georeference, ensure_geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data,\
     ensure_absolute
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.config import max_float
from anuga.utilities.numerical_tools import ensure_numeric, mean
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError
from anuga.shallow_water import Domain
from anuga.abstract_2d_finite_volumes.pmesh2domain import \
     pmesh_to_domain_instance
from anuga.abstract_2d_finite_volumes.util import get_revision_number, \
     remove_lone_verts, sww2timeseries, get_centroid_values
from anuga.load_mesh.loadASCII import export_mesh_file

# Formula mappings

quantity_formula = {'momentum':'(xmomentum**2 + ymomentum**2)**0.5',
                    'depth':'stage-elevation',
                    'speed': \
 '(xmomentum**2 + ymomentum**2)**0.5/(stage-elevation+1.e-6/(stage-elevation))'}
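
# A minimal usage sketch (not part of the module): the formula strings above
# are meant to be evaluated with eval() in a namespace where the quantity
# arrays are bound by name. The sample values below are made up.
#
#   from Numeric import array
#   stage = array([1.0, 2.0])
#   elevation = array([0.5, 0.5])
#   xmomentum = array([3.0, 4.0])
#   ymomentum = array([4.0, 3.0])
#   depth = eval(quantity_formula['depth'])        # -> [0.5, 1.5]
#   momentum = eval(quantity_formula['momentum'])  # -> [5.0, 5.0]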



def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s

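# Usage sketch (the input string is a made-up example):
#
#   >>> make_filename(' Run (final) output ')
#   'Run_final_output'
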


def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os.path

    if sys.platform in ['nt', 'dos', 'win32']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    # Note: access flags must be OR'ed together;
    # 'os.R_OK and os.W_OK' would test only os.W_OK
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' % path

    return path

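# Usage sketch (the directory name is hypothetical). check_dir returns the
# verified path with a trailing separator, creating the directory if it does
# not yet exist:
#
#   datadir = check_dir('results')        # e.g. 'results/' on unix
#   fid = open(datadir + 'log.txt', 'w')
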


def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" % X

        os.rmdir(path)


# Another option, if needed in the future, Nick B 7/2007
def rmgeneric(path, __func__, verbose=False):
    ERROR_STR = """Error removing %(path)s, %(error)s """

    try:
        __func__(path)
        if verbose: print 'Removed ', path
    except OSError, (errno, strerror):
        print ERROR_STR % {'path' : path, 'error': strerror}

def removeall(path, verbose=False):

    if not os.path.isdir(path):
        return

    files = os.listdir(path)

    for x in files:
        fullpath = os.path.join(path, x)
        if os.path.isfile(fullpath):
            f = os.remove
            rmgeneric(fullpath, f)
        elif os.path.isdir(fullpath):
            removeall(fullpath, verbose)
            f = os.rmdir
            rmgeneric(fullpath, f, verbose)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from anuga.config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' % size

    if time is not None:
        FN += '_time%.2f' % time

    FN += '.' + format
    return FN

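# Usage sketch (names are hypothetical). Size and time are woven into the
# name when given:
#
#   >>> create_filename('.', 'run1', 'sww', size=4, time=1.5)
#   './run1_size4_time1.50.sww'        # on unix
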

def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from anuga.config import data_dir

    dir = check_dir(datadir)

    # check_dir returns the path with a trailing separator and
    # create_filename appends '_size%d', so match that pattern here
    pattern = dir + filename + '_size%d*.%s' % (size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode='w'):
        assert mode in ['r', 'w', 'a'], \
               "Mode %s must be either 'w' (write), 'r' (read) or 'a' (append)" % mode

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.timestep = 0
        self.domain = domain



        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)




        #FIXME: Should we have a general set_precision function?


#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode='w',
                 max_size=2000000000,
                 recursion=False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'
            self.writer = Write_sww()
            self.writer.store_header(fid,
                                     domain.starttime,
                                     self.number_of_volumes,
                                     self.domain.number_of_full_nodes,
                                     description=description,
                                     smoothing=domain.smooth,
                                     order=domain.default_order,
                                     sww_precision=self.precision)

            # Extra optional information
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            if domain.quantities_to_be_monitored is not None:
                fid.createDimension('singleton', 1)
                fid.createDimension('two', 2)

                poly = domain.monitor_polygon
                if poly is not None:
                    N = len(poly)
                    fid.createDimension('polygon_length', N)
                    fid.createVariable('extrema.polygon',
                                       self.precision,
                                       ('polygon_length',
                                        'two'))
                    fid.variables['extrema.polygon'][:] = poly


                interval = domain.monitor_time_interval
                if interval is not None:
                    fid.createVariable('extrema.time_interval',
                                       self.precision,
                                       ('two',))
                    fid.variables['extrema.time_interval'][:] = interval


                for q in domain.quantities_to_be_monitored:
                    fid.createVariable(q+'.extrema', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q+'.min_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q+'.max_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q+'.min_time', self.precision,
                                       ('singleton',))
                    fid.createVariable(q+'.max_time', self.precision,
                                       ('singleton',))


        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)

        points = concatenate( (X[:,NewAxis],Y[:,NewAxis]), axis=1 )
        self.writer.store_triangulation(fid,
                                        points,
                                        V.astype(volumes.typecode()),
                                        Z,
                                        points_georeference= \
                                        domain.geo_reference)

        # Close
        fid.close()


    def store_timestep(self, names=None):
        """Store time and named quantities to file
        """

        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose


        if names is None:
            # Standard shallow water wave equation quantities in ANUGA
            names = ['stage', 'xmomentum', 'ymomentum']

        # Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') # Open existing file
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      % self.filename
                msg += ' - trying step %s again' % self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            raise DataFileNotOpenError, msg



        # Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time) + 1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            # In order to get the file name and start time correct,
            # I change the domain.filename and domain.starttime.
            # This is the only way to do this without changing
            # other modules (I think).

            # Write a filename addon that won't break Swollen's reader
            # (10.sww is bad)
            filename_ext = '_time_%s' % self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            # Remember the old filename, then give domain a
            # name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename+filename_ext)


            # Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            # Build a new data_structure.
            next_data_structure = \
                Data_format_sww(self.domain, mode=self.mode,
                                max_size=self.max_size,
                                recursion=self.recursion+1)
            if not self.recursion:
                print '    file_size = %s' % file_size
                print '    saving file to %s' % next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)
            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            if 'stage' in names and 'xmomentum' in names and \
               'ymomentum' in names:

                # Get stage, elevation, depth and select only those
                # values where minimum_storable_height is exceeded
                Q = domain.quantities['stage']
                A, _ = Q.get_vertex_values(xy=False,
                                           precision=self.precision)
                z = fid.variables['elevation']

                storable_indices = A-z[:] >= self.minimum_storable_height
                stage = choose(storable_indices, (z[:], A))

                # Define a zero vector of same size and type as A
                # for use with momenta
                null = zeros(size(A), A.typecode())

                # Get xmomentum where depth exceeds minimum_storable_height
                Q = domain.quantities['xmomentum']
                xmom, _ = Q.get_vertex_values(xy=False,
                                              precision=self.precision)
                xmomentum = choose(storable_indices, (null, xmom))


                # Get ymomentum where depth exceeds minimum_storable_height
                Q = domain.quantities['ymomentum']
                ymom, _ = Q.get_vertex_values(xy=False,
                                              precision=self.precision)
                ymomentum = choose(storable_indices, (null, ymom))

                # Write quantities to NetCDF
                self.writer.store_quantities(fid,
                                             time=self.domain.time,
                                             sww_precision=self.precision,
                                             stage=stage,
                                             xmomentum=xmomentum,
                                             ymomentum=ymomentum)
            else:
                msg = 'Quantities stored must be: stage, xmomentum, ymomentum.'
                msg += ' Instead I got: ' + str(names)
                raise Exception, msg



            # Update extrema if requested
            domain = self.domain
            if domain.quantities_to_be_monitored is not None:
                for q, info in domain.quantities_to_be_monitored.items():

                    if info['min'] is not None:
                        fid.variables[q + '.extrema'][0] = info['min']
                        fid.variables[q + '.min_location'][:] = \
                                        info['min_location']
                        fid.variables[q + '.min_time'][0] = info['min_time']

                    if info['max'] is not None:
                        fid.variables[q + '.extrema'][1] = info['max']
                        fid.variables[q + '.max_location'][:] = \
                                        info['max_location']
                        fid.variables[q + '.max_time'][0] = info['max_time']



            # Flush and close
            fid.sync()
            fid.close()


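# Usage sketch (hedged; domain construction is elided). In a normal ANUGA run
# this class is driven by the domain itself; instantiated directly it would
# look roughly like:
#
#   sww = Data_format_sww(domain)          # writes header to <name>.sww
#   sww.store_connectivity()               # static mesh and bed elevation
#   for t in domain.evolve(yieldstep=1, finaltime=10):
#       sww.store_timestep(['stage', 'xmomentum', 'ymomentum'])
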

# Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode='w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            # Static bed elevation, written once by store_checkpoint
            fid.createVariable('elevation', self.precision,
                               ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all time dependent quantities
            #(elevation is stored once above, not per timestep)
            for name in domain.quantities.keys():
                if name == 'elevation': continue
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and bed elevation of the vertices,
        and the connectivity (volumes) of the triangles.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision=self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  % self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        quantity_var = fid.variables[name]  # variable for the named quantity
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision=self.precision)

        quantity_var[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()


#### NED is national exposure database (name changed to NEXIS)

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self, file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None, title_check_list=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.


        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad, the
        info to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.


        """
        self._file_name = file_name
        self._geospatial = None

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
            csv2dict(self._file_name, title_check_list=title_check_list)
        try:
            #Have code here that handles caps or lower
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if is_x_y_locations is not False:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes=lats,
                                               longitudes=longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #fixme throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result != 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result != 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic,
                                         other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result != 0:
                    return result
            return 0
        else:
            return 1


    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' % column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column, using the known_values,
        to return a value of the value_column_name.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) != value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' % column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name, 'wb')
        writer = csv.writer(fd)

        #Write the title to a csv file
        line = [None]*len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]] = title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]*len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]] = \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

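# Usage sketch (hedged; 'exposure.csv' and its column names are
# hypothetical):
#
#   ec = Exposure_csv('exposure.csv')
#   lats = ec.get_column('LATITUDE')               # list of strings
#   damage = [float(x) * 0.5 for x in ec.get_column('VALUE')]
#   ec.set_column('DAMAGE', damage)
#   ec.save('exposure_with_damage.csv')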

def csv2dict(file_name, title_check_list=None):
    """
    Load the csv as a dic: title as key and column info as value.
    Also create a dic of title as key and column index as value,
    to keep track of the column order.

    Two dictionaries are returned.

    WARNING: Values are returned as strings.
    Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]


    """

    attribute_dic = {}
    title_index_dic = {}
    titles_stripped = [] # list of titles
    reader = csv.reader(file(file_name))

    # Read in and manipulate the title info
    titles = reader.next()
    for i, title in enumerate(titles):
        titles_stripped.append(title.strip())
        title_index_dic[title.strip()] = i
    title_count = len(titles_stripped)
    if title_check_list is not None:
        for title_check in title_check_list:
            if not title_index_dic.has_key(title_check):
                msg = "Reading error. This column is not present: %s" \
                      % title_check
                raise IOError, msg



    #create a dic of column values, indexed by column title
    for line in reader:
        if len(line) != title_count:
            msg = 'Row has %d values; expected %d' % (len(line), title_count)
            raise IOError, msg
        for i, value in enumerate(line):
            attribute_dic.setdefault(titles_stripped[i], []).append(value)

    return attribute_dic, title_index_dic

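# Usage sketch (file contents are made up). For a file 'gauges.csv'
# containing
#
#   name,easting,northing
#   g1,3.0,4.0
#   g2,5.0,6.0
#
# csv2dict('gauges.csv') returns
#   ({'name': ['g1', 'g2'], 'easting': ['3.0', '5.0'],
#     'northing': ['4.0', '6.0']},
#    {'name': 0, 'easting': 1, 'northing': 2})
# Remember that all values come back as strings.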

#Auxiliary
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N == 3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))

    outfile.close()

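# Usage sketch (the coordinates are made up). Each row holds the three
# vertices of one disconnected triangle:
#
#   from Numeric import array
#   x = array([[0.0, 1.0, 0.0]])
#   y = array([[0.0, 0.0, 1.0]])
#   z = array([[0.0, 0.0, 0.0]])
#   write_obj('single_triangle', x, y, z)   # creates single_triangle.obj
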

#########################################################
# Conversion routines
#########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN, xx, yy, zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' % t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN, xx, yy, zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float, tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry', x, y, z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(data_dir+os.sep+filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Could not get time from filename %s' % filename



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float, tokens)

            for j in range(3):
                z[i,j] = values[j]


        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' % t, x, y, z)


def filter_netcdf(filename1, filename2, first=0, last=None, step=1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    NetCDF file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' % (j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

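# Usage sketch (file names are hypothetical). Keep every second timestep of
# the first 100:
#
#   filter_netcdf('run1.sww', 'run1_filtered.sww', first=0, last=100, step=2)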

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' % domain.format)
    return cls(domain, mode)


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies=[basename_in + '.dem'],
                       verbose=verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

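# Usage sketch (the file 'topo.dem' is hypothetical). Clip the DEM to a
# bounding box while converting; the coordinates echo the docstring example:
#
#   dem2pts('topo', basename_out='topo_clipped',
#           easting_min=722000, easting_max=730000,
#           northing_min=5893000, northing_max=5900000,
#           verbose=True)                  # writes topo_clipped.pts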

def _dem2pts(basename_in, basename_out=None, verbose=False,
             easting_min=None, easting_max=None,
             northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    # FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    # Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  # Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' % (root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  # Easting of lower left corner
    yllcorner = infile.yllcorner[0]  # Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    # Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    # Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' % ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    # Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                          'of spatial point data'
    # Assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    # Compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    # Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min # Easting of lower left corner
    outfile.yllcorner = northing_min # Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    # Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # Calculating number of NODATA_values for each row in clipped region
    # FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # Dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1, j1_0:thisj+1]

    if verbose:
        print 'There are %d values in the elevation' % totalnopoints
        print 'There are %d values in the clipped elevation' % clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' % nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # Variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    # Store data
    global_index = 0
    # for i in range(nrows):
    for i in range(i1_0, thisi+1, 1):
        if verbose and i % ((nrows+10)/10) == 0:
            print 'Processing row %d of %d' % (i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i, index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA # ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv # ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0, index2+1, 1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min, y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])

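# Usage sketch (the file name is hypothetical): the generator yields one list
# of [easting, northing, elevation] triplets per cross section, e.g.
#
#   lines = open('cross_sections.sdf').readlines()
#   for section in _read_hecras_cross_sections(lines):
#       print len(section), 'points in this cross section'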
1491
1492
1493
1494def hecras_cross_sections2pts(basename_in,
1495                              basename_out=None,
1496                              verbose=False):
1497    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1498
1499    Example:
1500
1501
1502# RAS export file created on Mon 15Aug2005 11:42
1503# by HEC-RAS Version 3.1.1
1504
1505BEGIN HEADER:
1506  UNITS: METRIC
1507  DTM TYPE: TIN
1508  DTM: v:\1\cit\perth_topo\river_tin
1509  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1510  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1511  MAP PROJECTION: UTM
1512  PROJECTION ZONE: 50
1513  DATUM: AGD66
1514  VERTICAL DATUM:
1515  NUMBER OF REACHES:  19
1516  NUMBER OF CROSS-SECTIONS:  14206
1517END HEADER:
1518
1519
1520Only the SURFACE LINE data of the following form will be utilised
1521
1522  CROSS-SECTION:
1523    STREAM ID:Southern-Wungong
1524    REACH ID:Southern-Wungong
1525    STATION:19040.*
1526    CUT LINE:
1527      405548.671603161 , 6438142.7594925
1528      405734.536092045 , 6438326.10404912
1529      405745.130459356 , 6438331.48627354
1530      405813.89633823 , 6438368.6272789
1531    SURFACE LINE:
1532     405548.67,   6438142.76,   35.37
1533     405552.24,   6438146.28,   35.41
1534     405554.78,   6438148.78,   35.44
1535     405555.80,   6438149.79,   35.44
1536     405559.37,   6438153.31,   35.45
1537     405560.88,   6438154.81,   35.44
1538     405562.93,   6438156.83,   35.42
1539     405566.50,   6438160.35,   35.38
1540     405566.99,   6438160.83,   35.37
1541     ...
1542   END CROSS-SECTION
1543
1544    Convert to NetCDF pts format which is
1545
1546    points:  (Nx2) Float array
1547    elevation: N Float array
1548    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    # Get ASCII file
    infile = open(root + '.sdf', 'r')  # Open SDF file for read

    if verbose: print 'Reading DEM from %s' %(root + '.sdf')

    lines = infile.readlines()
    infile.close()

    if verbose: print 'Converting to pts format'

    # Skip leading blank lines and comments
    i = 0
    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
        i += 1

    assert lines[i].strip().upper() == 'BEGIN HEADER:'
    i += 1

    assert lines[i].strip().upper().startswith('UNITS:')
    units = lines[i].strip().split()[1]
    i += 1

    assert lines[i].strip().upper().startswith('DTM TYPE:')
    i += 1

    assert lines[i].strip().upper().startswith('DTM:')
    i += 1

    assert lines[i].strip().upper().startswith('STREAM')
    i += 1

    assert lines[i].strip().upper().startswith('CROSS')
    i += 1

    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
    projection = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
    zone = int(lines[i].strip().split(':')[1])
    i += 1

    assert lines[i].strip().upper().startswith('DATUM:')
    datum = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
    number_of_cross_sections = int(lines[i].strip().split(':')[1])
    i += 1

    # Now read all points
    points = []
    elevation = []
    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
        for entry in entries:
            points.append(entry[:2])
            elevation.append(entry[2])

    msg = 'Actual number of cross sections, %d, does not match reported number, %d'\
          %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg

    # Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    geo_ref = Geo_reference(zone, 0, 0, datum, projection, units)
    geo = Geospatial_data(points, {"elevation":elevation},
                          verbose=verbose, geo_reference=geo_ref)
    geo.export_points_file(ptsname)

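# Usage sketch (illustrative, not from the original source): assuming the
# public wrapper hecras_cross_sections2pts defined earlier in this module,
# a HEC-RAS SDF export could be converted like so:
#
#     hecras_cross_sections2pts('southern_wungong',          # reads southern_wungong.sdf
#                               basename_out='wungong_xs',   # writes wungong_xs.pts
#                               verbose=True)
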
def export_grid(basename_in, extra_name_out = None,
                quantities = None, # defaults to elevation
                timestep = None,
                reduction = None,
                cellsize = 10,
                NODATA_value = -9999,
                easting_min = None,
                easting_max = None,
                northing_min = None,
                northing_max = None,
                verbose = False,
                origin = None,
                datum = 'WGS84',
                format = 'ers'):
    """Wrapper for sww2dem - see sww2dem for what most of the
    parameters do.

    quantities is a list of quantities; each quantity will be
    calculated for each sww file.

    Returns the basenames of the files produced, i.e. the directory
    plus the file name without the extension.  One output file is
    produced per input sww file and per quantity.
    """
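    # Usage sketch (illustrative, not from the original source): export
    # ASCII grids of stage and elevation on a 5 m grid for every sww file
    # whose name starts with 'runup' in the current directory:
    #
    #     names = export_grid('runup',
    #                         quantities=['stage', 'elevation'],
    #                         cellsize=5,
    #                         format='asc')
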
    if quantities is None:
        quantities = ['elevation']

    if type(quantities) is str:
        quantities = [quantities]

    # How many sww files are there?
    dir, base = os.path.split(basename_in)

    iterate_over = get_all_swwfiles(dir, base, verbose)

    if dir == "":
        dir = "." # Unix compatibility

    files_out = []
    for sww_file in iterate_over:
        for quantity in quantities:
            if extra_name_out is None:
                basename_out = sww_file + '_' + quantity
            else:
                basename_out = sww_file + '_' + quantity + '_' \
                               + extra_name_out

            file_out = sww2dem(dir+sep+sww_file, dir+sep+basename_out,
                               quantity,
                               timestep,
                               reduction,
                               cellsize,
                               NODATA_value,
                               easting_min,
                               easting_max,
                               northing_min,
                               northing_max,
                               verbose,
                               origin,
                               datum,
                               format)
            files_out.append(file_out)
    return files_out


def get_timeseries(production_dirs, output_dir, scenario_name, gauges_dir_name,
                   plot_quantity, generate_fig = False,
                   reportname = None, surface = False, time_min = None,
                   time_max = None, title_on = False, verbose = True,
                   nodes=None):
    """
    nodes - number of processes used.

    Warning: this function has no tests and relies on names defined
    elsewhere (e.g. project and best_boundary_sww).
    """
    if reportname is None:
        report = False
    else:
        report = True

    if nodes is None:
        is_parallel = False
    else:
        is_parallel = True

    # Generate figures
    swwfiles = {}

    if is_parallel is True:
        base_reportname = reportname
        for i in range(nodes):
            print 'Sending node %d of %d' %(i, nodes)
            swwfiles = {}
            if reportname is not None:
                reportname = base_reportname + '_%s' %(i)
            for label_id in production_dirs.keys():
                if label_id == 'boundaries':
                    swwfile = best_boundary_sww
                else:
                    file_loc = output_dir + label_id + sep
                    sww_extra = '_P%s_%s' %(i, nodes)
                    swwfile = file_loc + scenario_name + sww_extra + '.sww'
                    print 'swwfile', swwfile
                    swwfiles[swwfile] = label_id

            texname, elev_output = sww2timeseries(swwfiles,
                                                  gauges_dir_name,
                                                  production_dirs,
                                                  report = report,
                                                  reportname = reportname,
                                                  plot_quantity = plot_quantity,
                                                  generate_fig = generate_fig,
                                                  surface = surface,
                                                  time_min = time_min,
                                                  time_max = time_max,
                                                  title_on = title_on,
                                                  verbose = verbose)
    else:
        for label_id in production_dirs.keys():
            if label_id == 'boundaries':
                print 'boundaries'
                file_loc = project.boundaries_in_dir
                swwfile = project.boundaries_dir_name3 + '.sww'
            else:
                file_loc = output_dir + label_id + sep
                swwfile = file_loc + scenario_name + '.sww'
            swwfiles[swwfile] = label_id

        texname, elev_output = sww2timeseries(swwfiles,
                                              gauges_dir_name,
                                              production_dirs,
                                              report = report,
                                              reportname = reportname,
                                              plot_quantity = plot_quantity,
                                              generate_fig = generate_fig,
                                              surface = surface,
                                              time_min = time_min,
                                              time_max = time_max,
                                              title_on = title_on,
                                              verbose = verbose)


def sww2dem(basename_in, basename_out = None,
            quantity = None, # defaults to elevation
            timestep = None,
            reduction = None,
            cellsize = 10,
            NODATA_value = -9999,
            easting_min = None,
            easting_max = None,
            northing_min = None,
            northing_max = None,
            verbose = False,
            origin = None,
            datum = 'WGS84',
            format = 'ers'):

    """Read SWW file and convert to Digital Elevation Model format
    (.asc or .ers)

    Example (ASC):

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    For the 'asc' format an accompanying file with the same basename_out
    but extension .prj is also written. It fixes the UTM zone, datum,
    false northings and eastings.

    The prj format is assumed to be as follows:

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'elevation'. Quantity is not a list of quantities.

    If timestep (an index) is given, output quantity at that timestep.

    If reduction is given, use that to reduce quantity over all timesteps.

    datum sets the datum recorded in the output (default 'WGS84').

    format can be either 'asc' or 'ers'.
    """
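    # Usage sketch (illustrative, not from the original source): grid the
    # maximum water depth from 'runup.sww' at 10 m resolution, writing
    # runup_depth.asc and runup_depth.prj.  'stage-elevation' is an
    # expression evaluated from quantities stored in the file:
    #
    #     sww2dem('runup', basename_out='runup_depth',
    #             quantity='stage-elevation',
    #             reduction=max,
    #             cellsize=10,
    #             format='asc')
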

    import sys
    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, \
         sometrue
    from Numeric import array2string

    from anuga.utilities.polygon import inside_polygon, outside_polygon, \
         separate_points_by_polygon
    from anuga.abstract_2d_finite_volumes.util import \
         apply_expression_to_dictionary

    msg = 'Format must be either asc or ers'
    assert format.lower() in ['asc', 'ers'], msg

    false_easting = 500000
    false_northing = 10000000

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    if quantity_formula.has_key(quantity):
        quantity = quantity_formula[quantity]

    swwfile = basename_in + '.sww'
    demfile = basename_out + '.' + format
    # Note: the use of a .ers extension is optional (write_ermapper_grid
    # will deal with either option)

    # Read sww file
    if verbose:
        print 'Reading from %s' %swwfile
        print 'Output directory is %s' %basename_out

    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    # Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]
    if timestep is not None:
        times = fid.variables['time'][timestep]
    else:
        times = fid.variables['time'][:]

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']

    if origin is None:
        # Get geo_reference
        # sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() # Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]


    # FIXME: Refactor using code from Interpolation_function.statistics
    # (in interpolate.py)
    # Something like print swwstats(swwname)
    if verbose:
        print '------------------------------------------------'
        print 'Statistics of SWW file:'
        print '  Name: %s' %swwfile
        print '  Reference:'
        print '    Lower left corner: [%f, %f]'\
              %(xllcorner, yllcorner)
        if timestep is not None:
            print '    Time: %f' %(times)
        else:
            print '    Start time: %f' %fid.starttime[0]
        print '  Extent:'
        print '    x [m] in [%f, %f], len(x) == %d'\
              %(min(x.flat), max(x.flat), len(x.flat))
        print '    y [m] in [%f, %f], len(y) == %d'\
              %(min(y.flat), max(y.flat), len(y.flat))
        if timestep is not None:
            print '    t [s] = %f, len(t) == %d' %(times, 1)
        else:
            print '    t [s] in [%f, %f], len(t) == %d'\
                  %(min(times), max(times), len(times))
        print '  Quantities [SI units]:'
        # Comment out for reduced memory consumption
        for name in ['stage', 'xmomentum', 'ymomentum']:
            q = fid.variables[name][:].flat
            if timestep is not None:
                q = q[timestep*len(x):(timestep+1)*len(x)]
            print '    %s in [%f, %f]' %(name, min(q), max(q))
        for name in ['elevation']:
            q = fid.variables[name][:].flat
            print '    %s in [%f, %f]' %(name, min(q), max(q))

    # Get quantity and reduce if applicable
    if verbose: print 'Processing quantity %s' %quantity

    # Turn NetCDF objects into Numeric arrays
    try:
        q = fid.variables[quantity][:]
    except KeyError:
        # quantity is an expression involving stored quantities
        quantity_dict = {}
        for name in fid.variables.keys():
            quantity_dict[name] = fid.variables[name][:]
        # Convert quantity expression to quantities found in sww file
        q = apply_expression_to_dictionary(quantity, quantity_dict)

    if len(q.shape) == 2:
        # q has a time component and needs to be reduced along
        # the temporal dimension
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        if timestep is not None:
            for k in range(number_of_points):
                q_reduced[k] = q[timestep,k]
        else:
            for k in range(number_of_points):
                q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    # Post condition: Now q has dimension: number_of_points
    assert len(q.shape) == 1
    assert q.shape[0] == number_of_points

    if verbose:
        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))


    # Create grid and update xll/yll corner and x,y

    # Relative extent
    if easting_min is None:
        xmin = min(x)
    else:
        xmin = easting_min - xllcorner

    if easting_max is None:
        xmax = max(x)
    else:
        xmax = easting_max - xllcorner

    if northing_min is None:
        ymin = min(y)
    else:
        ymin = northing_min - yllcorner

    if northing_max is None:
        ymax = max(y)
    else:
        ymax = northing_max - yllcorner

    if verbose: print 'Creating grid'
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1

    # New absolute reference and coordinates
    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate((x[:, NewAxis], y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2

    grid_points = zeros((ncols*nrows, 2), Float)

    for i in xrange(nrows):
        if format.lower() == 'asc':
            yg = i*cellsize
        else:
            # this will flip the order of the y values for ers
            yg = (nrows-i)*cellsize

        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

            grid_points[k,0] = xg
            grid_points[k,1] = yg

    # Interpolate
    from anuga.fit_interpolate.interpolate import Interpolate

    # Remove loners from vertex_points, volumes here
    vertex_points, volumes = remove_lone_verts(vertex_points, volumes)
    interp = Interpolate(vertex_points, volumes, verbose = verbose)

    # Interpolate using quantity values
    if verbose: print 'Interpolating'
    grid_values = interp.interpolate(q, grid_points).flat

    if verbose:
        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
                                                      max(grid_values))

    # Assign NODATA_value to all points outside bounding polygon
    # (from interpolation mesh)
    P = interp.mesh.get_boundary_polygon()
    outside_indices = outside_polygon(grid_points, P, closed=True)

    for i in outside_indices:
        grid_values[i] = NODATA_value

    if format.lower() == 'ers':
        # Setup ERS header information
        grid_values = reshape(grid_values, (nrows, ncols))
        header = {}
        header['datum'] = '"' + datum + '"'
        # FIXME The use of hardwired UTM and zone number needs to be made optional
        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
        header['projection'] = '"UTM-' + str(zone) + '"'
        header['coordinatetype'] = 'EN'
        if header['coordinatetype'] == 'LL':
            header['longitude'] = str(newxllcorner)
            header['latitude'] = str(newyllcorner)
        elif header['coordinatetype'] == 'EN':
            header['eastings'] = str(newxllcorner)
            header['northings'] = str(newyllcorner)
        header['nullcellvalue'] = str(NODATA_value)
        header['xdimension'] = str(cellsize)
        header['ydimension'] = str(cellsize)
        header['value'] = '"' + quantity + '"'
        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test

        # Write
        if verbose: print 'Writing %s' %demfile
        import ermapper_grids
        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)

        fid.close()
        return basename_out
    else:
        # Write to ASCII format

        # Write prj file
        prjfile = basename_out + '.prj'

        if verbose: print 'Writing %s' %prjfile
        prjid = open(prjfile, 'w')
        prjid.write('Projection    %s\n' %'UTM')
        prjid.write('Zone          %d\n' %zone)
        prjid.write('Datum         %s\n' %datum)
        prjid.write('Zunits        NO\n')
        prjid.write('Units         METERS\n')
        prjid.write('Spheroid      %s\n' %datum)
        prjid.write('Xshift        %d\n' %false_easting)
        prjid.write('Yshift        %d\n' %false_northing)
        prjid.write('Parameters\n')
        prjid.close()

        if verbose: print 'Writing %s' %demfile

        ascid = open(demfile, 'w')

        ascid.write('ncols         %d\n' %ncols)
        ascid.write('nrows         %d\n' %nrows)
        ascid.write('xllcorner     %d\n' %newxllcorner)
        ascid.write('yllcorner     %d\n' %newyllcorner)
        ascid.write('cellsize      %f\n' %cellsize)
        ascid.write('NODATA_value  %d\n' %NODATA_value)

        # Write one row of the grid per line, from the top row down
        for i in range(nrows):
            if verbose and i%((nrows+10)/10)==0:
                print 'Doing row %d of %d' %(i, nrows)

            base_index = (nrows-i-1)*ncols

            row = grid_values[base_index:base_index+ncols]
            s = array2string(row, max_line_width=sys.maxint)
            ascid.write(s[1:-1] + '\n')

        # Close
        ascid.close()
        fid.close()
        return basename_out


# Backwards compatibility
def sww2asc(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            verbose = False,
            origin = None):
    print 'sww2asc will soon be obsoleted - please use sww2dem'
    sww2dem(basename_in,
            basename_out = basename_out,
            quantity = quantity,
            timestep = timestep,
            reduction = reduction,
            cellsize = cellsize,
            verbose = verbose,
            origin = origin,
            datum = 'WGS84',
            format = 'asc')

def sww2ers(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            verbose = False,
            origin = None,
            datum = 'WGS84'):
    print 'sww2ers will soon be obsoleted - please use sww2dem'
    sww2dem(basename_in,
            basename_out = basename_out,
            quantity = quantity,
            timestep = timestep,
            reduction = reduction,
            cellsize = cellsize,
            verbose = verbose,
            origin = origin,
            datum = datum,
            format = 'ers')
################################# END COMPATIBILITY ##############


def sww2pts(basename_in, basename_out=None,
            data_points=None,
            quantity=None,
            timestep=None,
            reduction=None,
            NODATA_value=-9999,
            verbose=False,
            origin=None):
    """Read SWW file and convert to interpolated values at selected points

    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'elevation'.

    If timestep (an index) is given, output quantity at that timestep.

    If reduction is given, use that to reduce quantity over all timesteps.

    data_points (Nx2 array) gives the locations of the points where the
    quantity is to be computed.
    """
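    # Usage sketch (illustrative, not from the original source): sample the
    # maximum stage from 'runup.sww' at two points (given in the sww file's
    # coordinate system), writing runup_stage.pts:
    #
    #     sww2pts('runup', basename_out='runup_stage',
    #             data_points=[[305421.0, 6181150.0],
    #                          [305521.0, 6181250.0]],
    #             quantity='stage',
    #             reduction=max)
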

    import sys
    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
    from Numeric import array2string

    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary

    from anuga.geospatial_data.geospatial_data import Geospatial_data

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    ptsfile = basename_out + '.pts'

    # Read sww file
    if verbose: print 'Reading from %s' %swwfile
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    # Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:
        # Get geo_reference
        # sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() # Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]

    # FIXME: Refactor using code from file_function.statistics
    # Something like print swwstats(swwname)
    if verbose:
        times = fid.variables['time'][:]
        print '------------------------------------------------'
        print 'Statistics of SWW file:'
        print '  Name: %s' %swwfile
        print '  Reference:'
        print '    Lower left corner: [%f, %f]'\
              %(xllcorner, yllcorner)
        print '    Start time: %f' %fid.starttime[0]
        print '  Extent:'
        print '    x [m] in [%f, %f], len(x) == %d'\
              %(min(x.flat), max(x.flat), len(x.flat))
        print '    y [m] in [%f, %f], len(y) == %d'\
              %(min(y.flat), max(y.flat), len(y.flat))
        print '    t [s] in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))
        print '  Quantities [SI units]:'
        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
            q = fid.variables[name][:].flat
            print '    %s in [%f, %f]' %(name, min(q), max(q))

    # Get quantity and reduce if applicable
    if verbose: print 'Processing quantity %s' %quantity

    # Turn NetCDF objects into Numeric arrays
    quantity_dict = {}
    for name in fid.variables.keys():
        quantity_dict[name] = fid.variables[name][:]

    # Convert quantity expression to quantities found in sww file
    q = apply_expression_to_dictionary(quantity, quantity_dict)

    if len(q.shape) == 2:
        # q has a time component and needs to be reduced along
        # the temporal dimension
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        if timestep is not None:
            for k in range(number_of_points):
                q_reduced[k] = q[timestep,k]
        else:
            for k in range(number_of_points):
                q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    # Post condition: Now q has dimension: number_of_points
    assert len(q.shape) == 1
    assert q.shape[0] == number_of_points

    if verbose:
        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))

    # Create grid and update xll/yll corner and x,y
    vertex_points = concatenate((x[:, NewAxis], y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2

    # Interpolate
    from anuga.fit_interpolate.interpolate import Interpolate
    interp = Interpolate(vertex_points, volumes, verbose = verbose)

    # Interpolate using quantity values
    if verbose: print 'Interpolating'
    interpolated_values = interp.interpolate(q, data_points).flat

    if verbose:
        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
                                                      max(interpolated_values))

    # Assign NODATA_value to all points outside bounding polygon
    # (from interpolation mesh)
    P = interp.mesh.get_boundary_polygon()
    outside_indices = outside_polygon(data_points, P, closed=True)

    for i in outside_indices:
        interpolated_values[i] = NODATA_value

    # Store results
    G = Geospatial_data(data_points=data_points,
                        attributes=interpolated_values)

    G.export_points_file(ptsfile, absolute = True)

    fid.close()


def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
                                  use_cache = False,
                                  verbose = False):
    """Read Digital Elevation Model from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert basename_in + '.asc' to NetCDF format (.dem)
    mimicking the ASCII format closely.


    An accompanying file with same basename_in but extension .prj must exist
    and is used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as follows:

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters
    """
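    # Usage sketch (illustrative, not from the original source): convert
    # topo.asc (with its companion topo.prj) to topo.dem, caching the
    # result so repeated runs are fast:
    #
    #     convert_dem_from_ascii2netcdf('topo', use_cache=True, verbose=True)
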

    kwargs = {'basename_out': basename_out, 'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
                       dependencies = [basename_in + '.asc',
                                       basename_in + '.prj'],
                       verbose = verbose)

    else:
        result = _convert_dem_from_ascii2netcdf(basename_in, **kwargs)

    return result


def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
                                   verbose = False):
    """Read Digital Elevation Model from ASCII format (.asc)

    Internal function. See public function convert_dem_from_ascii2netcdf
    for details.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, array

    root = basename_in

    ###########################################
    # Read Meta data
    if verbose: print 'Reading METADATA from %s' %(root + '.prj')
    metadatafile = open(root + '.prj')
    metalines = metadatafile.readlines()
    metadatafile.close()

    L = metalines[0].strip().split()
    assert L[0].strip().lower() == 'projection'
    projection = L[1].strip()                   #TEXT

    L = metalines[1].strip().split()
    assert L[0].strip().lower() == 'zone'
    zone = int(L[1].strip())

    L = metalines[2].strip().split()
    assert L[0].strip().lower() == 'datum'
    datum = L[1].strip()                        #TEXT

    L = metalines[3].strip().split()
    assert L[0].strip().lower() == 'zunits'     #IGNORE
    zunits = L[1].strip()                       #TEXT

    L = metalines[4].strip().split()
    assert L[0].strip().lower() == 'units'
    units = L[1].strip()                        #TEXT

    L = metalines[5].strip().split()
    assert L[0].strip().lower() == 'spheroid'   #IGNORE
    spheroid = L[1].strip()                     #TEXT

    L = metalines[6].strip().split()
    assert L[0].strip().lower() == 'xshift'
    false_easting = float(L[1].strip())

    L = metalines[7].strip().split()
    assert L[0].strip().lower() == 'yshift'
    false_northing = float(L[1].strip())

    ###########################################
    # Read DEM data

    datafile = open(basename_in + '.asc')

    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), 'lines'

    ncols = int(lines[0].split()[1].strip())
    nrows = int(lines[1].split()[1].strip())

    # Do cellsize (line 4) before line 2 and 3
    cellsize = float(lines[4].split()[1].strip())

    # Checks suggested by Joaquim Luis
    # Our internal representation of xllcorner
    # and yllcorner is non-standard.
    xref = lines[2].split()
    if xref[0].strip() == 'xllcorner':
        xllcorner = float(xref[1].strip()) # + 0.5*cellsize # Correct offset
    elif xref[0].strip() == 'xllcenter':
        xllcorner = float(xref[1].strip())
    else:
        msg = 'Unknown keyword: %s' %xref[0].strip()
        raise Exception, msg

    yref = lines[3].split()
    if yref[0].strip() == 'yllcorner':
        yllcorner = float(yref[1].strip()) # + 0.5*cellsize # Correct offset
    elif yref[0].strip() == 'yllcenter':
        yllcorner = float(yref[1].strip())
    else:
        msg = 'Unknown keyword: %s' %yref[0].strip()
        raise Exception, msg

    NODATA_value = int(lines[5].split()[1].strip())

    assert len(lines) == nrows + 6

    ##########################################

    if basename_out is None:
        netcdfname = root + '.dem'
    else:
        netcdfname = basename_out + '.dem'

    if verbose: print 'Store to NetCDF file %s' %netcdfname
    # NetCDF file definition
    fid = NetCDFFile(netcdfname, 'w')

    # Create new file
    fid.institution = 'Geoscience Australia'
    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
                      'of spatial point data'

    fid.ncols = ncols
    fid.nrows = nrows
    fid.xllcorner = xllcorner
    fid.yllcorner = yllcorner
    fid.cellsize = cellsize
    fid.NODATA_value = NODATA_value

    fid.zone = zone
    fid.false_easting = false_easting
    fid.false_northing = false_northing
    fid.projection = projection
    fid.datum = datum
    fid.units = units

    # dimension definitions
    fid.createDimension('number_of_rows', nrows)
    fid.createDimension('number_of_columns', ncols)

    # variable definitions
    fid.createVariable('elevation', Float, ('number_of_rows',
                                            'number_of_columns'))

    # Get handles to the variables
    elevation = fid.variables['elevation']

    # Store data
    n = len(lines[6:])
    for i, line in enumerate(lines[6:]):
        fields = line.split()
        if verbose and i%((n+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        elevation[i, :] = array([float(x) for x in fields])

    fid.close()



def ferret2sww(basename_in, basename_out = None,
               verbose = False,
               minlat = None, maxlat = None,
               minlon = None, maxlon = None,
               mint = None, maxt = None, mean_stage = 0,
               origin = None, zscale = 1,
               fail_on_NaN = True,
               NaN_filler = 0,
               elevation = None,
               inverted_bathymetry = True
               ):
    # FIXME: Bathymetry should be obtained from MOST somehow.
    # Alternatively from elsewhere or, as a last resort, specified here.
    # The value of -100 will work for the Wollongong tsunami scenario,
    # but is very hacky.
    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
    relative height, x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing)

    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
    which means that longitude is the fastest
    varying dimension (row major order, so to speak)

    ferret2sww uses grid points as vertices in a triangular grid
    counting vertices from lower left corner upwards, then right
    """
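    # Usage sketch (illustrative, not from the original source): convert
    # MOST output files most_ha.nc, most_ua.nc, most_va.nc and most_e.nc
    # to most.sww, cropping to a lat/lon window and lifting the stage
    # by 0.5 m:
    #
    #     ferret2sww('most',
    #                minlat=-35.0, maxlat=-34.0,
    #                minlon=150.0, maxlon=151.5,
    #                mean_stage=0.5)
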

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, Int, Int32, searchsorted, zeros, array
    from Numeric import allclose, around

    precision = Float

    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'

    if minlat is not None:
        assert -90 < minlat < 90 , msg
    if maxlat is not None:
        assert -90 < maxlat < 90 , msg
        if minlat is not None:
            assert maxlat > minlat
    if minlon is not None:
        assert -180 < minlon < 180 , msg
    if maxlon is not None:
        assert -180 < maxlon < 180 , msg
        if minlon is not None:
            assert maxlon > minlon

    # Get NetCDF data
    if verbose: print 'Reading files %s_*.nc' %basename_in
    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') # Wave amplitude (cm)
    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') # Velocity (x) (cm/s)
    file_v = NetCDFFile(basename_in + '_va.nc', 'r') # Velocity (y) (cm/s)
    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  # Elevation (z) (m)

    if basename_out is None:
        swwname = basename_in + '.sww'
    else:
        swwname = basename_out + '.sww'

    # Get dimensions of file_h
    for dimension in file_h.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_h_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_h_latitude = dimension
        if dimension[:4] == 'TIME':
            dim_h_time = dimension

    times = file_h.variables[dim_h_time]
    latitudes = file_h.variables[dim_h_latitude]
    longitudes = file_h.variables[dim_h_longitude]

    # Get dimensions for file_e
    for dimension in file_e.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_e_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_e_latitude = dimension

    # Get dimensions for file_u
    for dimension in file_u.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_u_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_u_latitude = dimension
        if dimension[:4] == 'TIME':
            dim_u_time = dimension

    # Get dimensions for file_v
    for dimension in file_v.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_v_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_v_latitude = dimension
        if dimension[:4] == 'TIME':
            dim_v_time = dimension

    # Precision used by MOST for lat/lon is 4 or 5 decimals
    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
    e_lon = around(file_e.variables[dim_e_longitude][:], 5)

    # Check that files are compatible
    assert allclose(latitudes, file_u.variables[dim_u_latitude])
    assert allclose(latitudes, file_v.variables[dim_v_latitude])
    assert allclose(latitudes, e_lat)

    assert allclose(longitudes, file_u.variables[dim_u_longitude])
    assert allclose(longitudes, file_v.variables[dim_v_longitude])
    assert allclose(longitudes, e_lon)

    if mint is None:
        jmin = 0
        mint = times[0]
    else:
        jmin = searchsorted(times, mint)

    if maxt is None:
        jmax = len(times)
        maxt = times[-1]
    else:
        jmax = searchsorted(times, maxt)

    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
                                                  longitudes[:],
                                                  minlat, maxlat,
                                                  minlon, maxlon)

    times = times[jmin:jmax]
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]

    if verbose: print 'cropping'
    zname = 'ELEVATION'

    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] # Lon
    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] # Lat
    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]

    # Get missing values
    nan_ha = file_h.variables['HA'].missing_value[0]
    nan_ua = file_u.variables['UA'].missing_value[0]
    nan_va = file_v.variables['VA'].missing_value[0]
    if hasattr(file_e.variables[zname], 'missing_value'):
        nan_e = file_e.variables[zname].missing_value[0]
    else:
        nan_e = None

    # Cleanup
    from Numeric import sometrue

    missing = (amplitudes == nan_ha)
    if sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values'\
                  %(basename_in+'_ha.nc')
            raise DataMissingValuesError, msg
        else:
            amplitudes = amplitudes*(missing==0) + missing*NaN_filler

    missing = (uspeed == nan_ua)
    if sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values'\
                  %(basename_in+'_ua.nc')
            raise DataMissingValuesError, msg
        else:
            uspeed = uspeed*(missing==0) + missing*NaN_filler

    missing = (vspeed == nan_va)
    if sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values'\
                  %(basename_in+'_va.nc')
            raise DataMissingValuesError, msg
        else:
            vspeed = vspeed*(missing==0) + missing*NaN_filler

    missing = (elevations == nan_e)
    if sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values'\
                  %(basename_in+'_e.nc')
            raise DataMissingValuesError, msg
        else:
            elevations = elevations*(missing==0) + missing*NaN_filler


    number_of_times = times.shape[0]
    number_of_latitudes = latitudes.shape[0]
    number_of_longitudes = longitudes.shape[0]

    assert amplitudes.shape[0] == number_of_times
    assert amplitudes.shape[1] == number_of_latitudes
    assert amplitudes.shape[2] == number_of_longitudes

    if verbose:
        print '------------------------------------------------'
        print 'Statistics:'
        print '  Extent (lat/lon):'
        print '    lat in [%f, %f], len(lat) == %d'\
              %(min(latitudes.flat), max(latitudes.flat),
                len(latitudes.flat))
        print '    lon in [%f, %f], len(lon) == %d'\
              %(min(longitudes.flat), max(longitudes.flat),
                len(longitudes.flat))
        print '    t in [%f, %f], len(t) == %d'\
              %(min(times.flat), max(times.flat), len(times.flat))

        q = amplitudes.flat
        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = uspeed.flat
        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = vspeed.flat
        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = elevations.flat
        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

    number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    file_h.close()
    file_u.close()
    file_v.close()
    file_e.close()

    # NetCDF file definition
    outfile = NetCDFFile(swwname, 'w')

    description = 'Converted from Ferret files: %s, %s, %s, %s'\
                  %(basename_in + '_ha.nc',
                    basename_in + '_ua.nc',
                    basename_in + '_va.nc',
                    basename_in + '_e.nc')

    # Create new file
    starttime = times[0]

    sww = Write_sww()
    sww.store_header(outfile, times, number_of_volumes,
                     number_of_points, description=description,
                     verbose=verbose, sww_precision=Float)

    # Store
    from anuga.coordinate_transforms.redfearn import redfearn
    x = zeros(number_of_points, Float)  # Easting
    y = zeros(number_of_points, Float)  # Northing

    if verbose: print 'Making triangular grid'

    # Check zone boundaries
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):       # Y direction
        for l, lon in enumerate(longitudes):  # X direction

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat, lon)

            msg = 'Zone boundary crossed at longitude = %f' %lon
            #assert zone == refzone, msg
            x[i] = easting
            y[i] = northing
            i += 1

    # Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    # X direction
        for k in range(number_of_latitudes-1): # Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            volumes.append([v1,v2,v3]) # Upper element
            volumes.append([v4,v3,v2]) # Lower element

    volumes = array(volumes)

    if origin is None:
        origin = Geo_reference(refzone, min(x), min(y))
    geo_ref = write_NetCDF_georeference(origin, outfile)

    if elevation is not None:
        z = elevation
    else:
        if inverted_bathymetry:
            z = -1*elevations
        else:
            z = elevations
    # FIXME: z should be obtained from MOST and passed in here

    # FIXME: use the Write_sww instance (sww) to write this info
    from Numeric import resize
    z = resize(z, outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z             # FIXME HACK for backwards compat.
    outfile.variables['elevation'][:] = z
    outfile.variables['volumes'][:] = volumes.astype(Int32) # For Opteron 64

    # Time stepping
    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    if verbose: print 'Converting quantities'
    n = len(times)
    for j in range(n):
        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      # Y direction
            for l in range(number_of_longitudes): # X direction
                w = zscale*amplitudes[j,k,l]/100 + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = uspeed[j,k,l]/100*h
                ymomentum[j,i] = vspeed[j,k,l]/100*h
                i += 1

    # FIXME: Refactor using code from file_function.statistics
    # Something like print swwstats(swwname)
    if verbose:
        x = outfile.variables['x'][:]
        y = outfile.variables['y'][:]
        print '------------------------------------------------'
        print 'Statistics of output file:'
        print '  Name: %s' %swwname
        print '  Reference:'
        print '    Lower left corner: [%f, %f]'\
              %(geo_ref.get_xllcorner(), geo_ref.get_yllcorner())
        print '    Start time: %f' %starttime
        print '    Min time: %f' %mint
        print '    Max time: %f' %maxt
        print '  Extent:'
        print '    x [m] in [%f, %f], len(x) == %d'\
              %(min(x.flat), max(x.flat), len(x.flat))
        print '    y [m] in [%f, %f], len(y) == %d'\
              %(min(y.flat), max(y.flat), len(y.flat))
        print '    t [s] in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))
        print '  Quantities [SI units]:'
        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
            q = outfile.variables[name][:].flat
            print '    %s in [%f, %f]' %(name, min(q), max(q))

    outfile.close()


def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False):
    """Template for converting typical text files with time series to
    NetCDF tms file.

    The file format is assumed to be either two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g.

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    or time (seconds), value0 value1 value2 ...

      0.0, 1.328223 0 0
      0.1, 1.292912 0 0

    This will provide a time dependent function f(t) with three attributes.

    filename is assumed to be the rootname; filename.txt is read and
    filename.tms is written.
    """
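    # Usage sketch (illustrative, not from the original source): given
    # gauge.txt containing lines like '31/08/04 00:00:00, 1.328223 0 0',
    # this writes gauge.tms with the three columns stored as named
    # attributes:
    #
    #     timefile2netcdf('gauge',
    #                     quantity_names=['stage', 'xmomentum', 'ymomentum'])
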

    import time, calendar
    from Numeric import array
    from anuga.config import time_format
    from anuga.utilities.numerical_tools import ensure_numeric

    fid = open(filename + '.txt')
    line = fid.readline()
    fid.close()

    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg

    if not time_as_seconds:
        try:
            starttime = calendar.timegm(time.strptime(fields[0], time_format))
        except ValueError:
            msg = 'First field in file %s must be' %filename
            msg += ' date-time with format %s.\n' %time_format
            msg += 'I got %s instead.' %fields[0]
            raise DataTimeError, msg
    else:
        try:
            starttime = float(fields[0])
        except ValueError:
            msg = 'Bad time format'
            raise DataTimeError, msg

    # Split values
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg

    # Read times proper
    from Numeric import zeros, Float, alltrue

    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    N = len(lines)
    d = len(q)

    T = zeros(N, Float)       # Time
    Q = zeros((N, d), Float)  # Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        if not time_as_seconds:
            realtime = calendar.timegm(time.strptime(fields[0], time_format))
        else:
            realtime = float(fields[0])
        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonically ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg

    # Create NetCDF file
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(filename + '.tms', 'w')

    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'

    # Reference point
    # Start time in seconds since the epoch (midnight 1/1/1970)
    # FIXME: Use Georef
    fid.starttime = starttime

    # dimension definitions
    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', Float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            # quantity_names is None or too short; make up a name
            name = 'Attribute%d' %i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()


def extent_sww(file_name):
    """
    Read in an sww file and return its extent.

    Input:
    file_name - the sww file

    Output:
    A list [xmin, xmax, ymin, ymax, stage_min, stage_max] giving the
    extent of the mesh coordinates and of the stage values over all
    timesteps.
    """
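    # Usage sketch (illustrative, not from the original source):
    #
    #     xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('runup.sww')
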

    from Scientific.IO.NetCDF import NetCDFFile

    # Get NetCDF
    fid = NetCDFFile(file_name, 'r')

    # Get the variables
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    stage = fid.variables['stage'][:]

    fid.close()

    return [min(x), max(x), min(y), max(y), min(stage.flat), max(stage.flat)]


def sww2domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
    """
    Usage: domain = sww2domain('file.sww', t=time (default = last time in file))

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.
    """
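    # Usage sketch (illustrative, not from the original source; assumes the
    # standard Domain.evolve API): rebuild a domain from the last timestep
    # of an existing sww file and keep computing from there:
    #
    #     domain = sww2domain('runup.sww', verbose=True)
    #     for t in domain.evolve(yieldstep=1, finaltime=10):
    #         domain.write_time()
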
    NaN = 9.969209968386869e+036
    # Initialise NaN to the default NetCDF fill value; overridden below by
    # each quantity's missing_value if present.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    # Open existing file for read
    time = fid.variables['time']       # Timesteps
    if t is None:
        t = time[-1]
    time_interp = get_time_interp(time, t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]                # x-coordinates of vertices
    y = fid.variables['y'][:]                # y-coordinates of vertices
    elevation = fid.variables['elevation']   # Elevation
    stage = fid.variables['stage']           # Water level
    xmomentum = fid.variables['xmomentum']   # Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   # Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:]    # Connectivity
    coordinates = transpose(asarray([x.tolist(), y.tolist()]))

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # Get geo_reference
    # sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except AttributeError:
        geo_reference = None

    if verbose: print '    getting quantities'
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity] =\
                  interpolated_quantity(fid.variables[quantity][:], time_interp)
        else: other_quantities.append(quantity)

    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')
    try:
        other_quantities.remove('stage_range')
        other_quantities.remove('xmomentum_range')
        other_quantities.remove('ymomentum_range')
        other_quantities.remove('elevation_range')
    except ValueError:
        pass # Range variables not stored in this file

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    #   From shallow_water.Domain:
    coordinates = coordinates.tolist()
    volumes = volumes.tolist()
    # FIXME: should this be in mesh? (peter row)
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates, volumes, boundary = weed(coordinates, volumes, boundary)

    try:
        domain = Domain(coordinates, volumes, boundary)
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. ' %e
        msg += 'Perhaps use "fail_if_NaN=False and NaN_filler = ..."'
        raise DataDomainError, msg

    if boundary is not None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    domain.starttime = float(starttime) + float(t)
    domain.time = 0.0

    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass # quantity has no missing_value number
3306        X = fid.variables[quantity][:]
3307        if very_verbose:
3308            print '       ',quantity
3309            print '        NaN =',NaN
3310            print '        max(X)'
3311            print '       ',max(X)
3312            print '        max(X)==NaN'
3313            print '       ',max(X)==NaN
3314            print ''
3315        if (max(X)==NaN) or (min(X)==NaN):
3316            if fail_if_NaN:
3317                msg = 'quantity "%s" contains no_data entry'%quantity
3318                raise DataMissingValuesError, msg
3319            else:
                data = (X != NaN)
                X = (X*data)+(data==0)*NaN_filler
3322        if unique:
3323            X = resize(X,(len(X)/3,3))
3324        domain.set_quantity(quantity,X)
3325    #
3326    for quantity in conserved_quantities:
3327        try:
3328            NaN = fid.variables[quantity].missing_value
3329        except:
3330            pass #quantity has no missing_value number
3331        X = interpolated_quantities[quantity]
3332        if very_verbose:
3333            print '       ',quantity
3334            print '        NaN =',NaN
3335            print '        max(X)'
3336            print '       ',max(X)
3337            print '        max(X)==NaN'
3338            print '       ',max(X)==NaN
3339            print ''
3340        if (max(X)==NaN) or (min(X)==NaN):
3341            if fail_if_NaN:
3342                msg = 'quantity "%s" contains no_data entry'%quantity
3343                raise DataMissingValuesError, msg
3344            else:
                data = (X != NaN)
                X = (X*data)+(data==0)*NaN_filler
3347        if unique:
3348            X = resize(X,(X.shape[0]/3,3))
3349        domain.set_quantity(quantity,X)
3350
3351    fid.close()
3352    return domain
3353
3354def interpolated_quantity(saved_quantity,time_interp):
3355
3356    #given an index and ratio, interpolate quantity with respect to time.
3357    index,ratio = time_interp
3358    Q = saved_quantity
3359    if ratio > 0:
3360        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3361    else:
3362        q = Q[index]
3363    #Return vector of interpolated values
3364    return q
3365
3366def get_time_interp(time,t=None):
3367    #Finds the ratio and index for time interpolation.
3368    #It is borrowed from previous abstract_2d_finite_volumes code.
3369    if t is None:
3370        t=time[-1]
3371        index = -1
3372        ratio = 0.
3373    else:
3374        T = time
3375        tau = t
3376        index=0
3377        msg = 'Time interval derived from file %s [%s:%s]'\
3378            %('FIXMEfilename', T[0], T[-1])
3379        msg += ' does not match model time: %s' %tau
3380        if tau < time[0]: raise DataTimeError, msg
3381        if tau > time[-1]: raise DataTimeError, msg
3382        while tau > time[index]: index += 1
3383        while tau < time[index]: index -= 1
3384        if tau == time[index]:
3385            #Protect against case where tau == time[-1] (last time)
3386            # - also works in general when tau == time[i]
3387            ratio = 0
3388        else:
3389            #t is now between index and index+1
3390            ratio = (tau - time[index])/(time[index+1] - time[index])
3391    return (index,ratio)
3392
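# Example: get_time_interp(time, t) with time = [0.0, 10.0, 20.0] and
# t = 15.0 returns (index=1, ratio=0.5); interpolated_quantity then
# returns 0.5*Q[1] + 0.5*Q[2], i.e. the quantity linearly interpolated
# half way between the second and third stored timesteps.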
3393
3394def weed(coordinates,volumes,boundary = None):
3395    if type(coordinates)==ArrayType:
3396        coordinates = coordinates.tolist()
3397    if type(volumes)==ArrayType:
3398        volumes = volumes.tolist()
3399
3400    unique = False
3401    point_dict = {}
3402    same_point = {}
3403    for i in range(len(coordinates)):
3404        point = tuple(coordinates[i])
3405        if point_dict.has_key(point):
3406            unique = True
3407            same_point[i]=point
3408            #to change all point i references to point j
3409        else:
3410            point_dict[point]=i
3411            same_point[i]=point
3412
3413    coordinates = []
3414    i = 0
3415    for point in point_dict.keys():
3416        point = tuple(point)
3417        coordinates.append(list(point))
3418        point_dict[point]=i
3419        i+=1
3420
3421
3422    for volume in volumes:
3423        for i in range(len(volume)):
3424            index = volume[i]
3425            if index>-1:
3426                volume[i]=point_dict[same_point[index]]
3427
3428    new_boundary = {}
3429    if not boundary is None:
3430        for segment in boundary.keys():
3431            point0 = point_dict[same_point[segment[0]]]
3432            point1 = point_dict[same_point[segment[1]]]
3433            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)
3436
3437            if new_boundary.has_key((point0,point1)):
3438                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3439                                              #+','+label
3440
3441            elif new_boundary.has_key((point1,point0)):
3442                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3443                                              #+','+label
3444            else: new_boundary[(point0,point1)]=label
3445
3446        boundary = new_boundary
3447
3448    return coordinates,volumes,boundary
3449
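# Example: weed merges duplicate vertices and remaps triangles, e.g.
#
#     coords, vols, bnd = weed([[0,0], [1,0], [0,0], [0,1]],
#                              [[0, 1, 3], [2, 3, 1]])
#
# leaves three unique coordinates with both triangles re-indexed to the
# single merged copy of (0,0).  (The ordering of the unique coordinates
# is not guaranteed, as it comes from dictionary keys.)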
3450
3451def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3452                 verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)
3454
3455    Example:
3456
3457    ncols         3121
3458    nrows         1800
3459    xllcorner     722000
3460    yllcorner     5893000
3461    cellsize      25
3462    NODATA_value  -9999
3463    138.3698 137.4194 136.5062 135.5558 ..........
3464
3465    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3466    """
3467
3468    import os
3469    from Scientific.IO.NetCDF import NetCDFFile
3470    from Numeric import Float, zeros, sum, reshape, equal
3471
3472    root = basename_in
3473    inname = root + '.dem'
3474
3475    #Open existing netcdf file to read
3476    infile = NetCDFFile(inname, 'r')
3477    if verbose: print 'Reading DEM from %s' %inname
3478
3479    #Read metadata
3480    ncols = infile.ncols[0]
3481    nrows = infile.nrows[0]
3482    xllcorner = infile.xllcorner[0]
3483    yllcorner = infile.yllcorner[0]
3484    cellsize = infile.cellsize[0]
3485    NODATA_value = infile.NODATA_value[0]
3486    zone = infile.zone[0]
3487    false_easting = infile.false_easting[0]
3488    false_northing = infile.false_northing[0]
3489    projection = infile.projection
3490    datum = infile.datum
3491    units = infile.units
3492
3493    dem_elevation = infile.variables['elevation']
3494
3495    #Get output file name
3496    if basename_out == None:
3497        outname = root + '_' + repr(cellsize_new) + '.dem'
3498    else:
3499        outname = basename_out + '.dem'
3500
3501    if verbose: print 'Write decimated NetCDF file to %s' %outname
3502
3503    #Determine some dimensions for decimated grid
3504    (nrows_stencil, ncols_stencil) = stencil.shape
3505    x_offset = ncols_stencil / 2
3506    y_offset = nrows_stencil / 2
3507    cellsize_ratio = int(cellsize_new / cellsize)
3508    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3509    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3510
3511    #Open netcdf file for output
3512    outfile = NetCDFFile(outname, 'w')
3513
3514    #Create new file
3515    outfile.institution = 'Geoscience Australia'
3516    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3517                           'of spatial point data'
3518    #Georeferencing
3519    outfile.zone = zone
3520    outfile.projection = projection
3521    outfile.datum = datum
3522    outfile.units = units
3523
3524    outfile.cellsize = cellsize_new
3525    outfile.NODATA_value = NODATA_value
3526    outfile.false_easting = false_easting
3527    outfile.false_northing = false_northing
3528
3529    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3530    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3531    outfile.ncols = ncols_new
3532    outfile.nrows = nrows_new
3533
3534    # dimension definition
3535    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3536
3537    # variable definition
3538    outfile.createVariable('elevation', Float, ('number_of_points',))
3539
3540    # Get handle to the variable
3541    elevation = outfile.variables['elevation']
3542
3543    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3544
3545    #Store data
3546    global_index = 0
3547    for i in range(nrows_new):
3548        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3549        lower_index = global_index
3550        telev =  zeros(ncols_new, Float)
3551        local_index = 0
3552        trow = i * cellsize_ratio
3553
3554        for j in range(ncols_new):
3555            tcol = j * cellsize_ratio
3556            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3557
3558            #if dem contains 1 or more NODATA_values set value in
3559            #decimated dem to NODATA_value, else compute decimated
3560            #value using stencil
3561            if sum(sum(equal(tmp, NODATA_value))) > 0:
3562                telev[local_index] = NODATA_value
3563            else:
3564                telev[local_index] = sum(sum(tmp * stencil))
3565
3566            global_index += 1
3567            local_index += 1
3568
3569        upper_index = global_index
3570
3571        elevation[lower_index:upper_index] = telev
3572
3573    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3574
3575    infile.close()
3576    outfile.close()
3577
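# A minimal usage sketch (hypothetical file names), decimating a 25m DEM
# to 100m with a 4x4 averaging stencil.  Each decimated cell is the
# stencil-weighted sum of the corresponding window of original cells,
# assuming cellsize_new is a multiple of the original cellsize:
#
#     from Numeric import ones, Float
#     stencil = ones((4, 4), Float) / 16.0   # plain averaging
#     decimate_dem('mydem', stencil, 100, basename_out='mydem_100')
#
# This reads mydem.dem and writes mydem_100.dem.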
3578
3579
3580
def tsh2sww(filename, verbose=False):
    """
    Convert a tsh/msh mesh file to an sww file, e.g. to check that
    the mesh 'looks' good.
    """

    if verbose: print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose: print "Number of triangles = ", len(domain)

    domain.smooth = True
    domain.format = 'sww'   #Native netcdf visualisation format
    file_path, filename = path.split(filename)
    filename, ext = path.splitext(filename)
    domain.set_name(filename)
    domain.reduction = mean
    if verbose: print "file_path", file_path
    if file_path == "": file_path = "."
    domain.set_datadir(file_path)

    if verbose:
        print "Output written to " + domain.get_datadir() + sep + \
              domain.get_name() + "." + domain.format
3604    sww = get_dataobject(domain)
3605    sww.store_connectivity()
3606    sww.store_timestep()
3607
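# Example (hypothetical file name):
#
#     tsh2sww('mesh.tsh', verbose=True)
#
# This writes mesh.sww to the same directory as the input file.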
3608
3609def asc_csiro2sww(bath_dir,
3610                  elevation_dir,
3611                  ucur_dir,
3612                  vcur_dir,
3613                  sww_file,
3614                  minlat = None, maxlat = None,
3615                  minlon = None, maxlon = None,
3616                  zscale=1,
3617                  mean_stage = 0,
3618                  fail_on_NaN = True,
3619                  elevation_NaN_filler = 0,
3620                  bath_prefix='ba',
3621                  elevation_prefix='el',
3622                  verbose=False):
3623    """
3624    Produce an sww boundary file, from esri ascii data from CSIRO.
3625
3626    Also convert latitude and longitude to UTM. All coordinates are
3627    assumed to be given in the GDA94 datum.
3628
3629    assume:
3630    All files are in esri ascii format
3631
3632    4 types of information
3633    bathymetry
3634    elevation
3635    u velocity
3636    v velocity
3637
3638    Assumptions
3639    The metadata of all the files is the same
    Each type is in a separate directory
    One bath file with extension .000
3642    The time period is less than 24hrs and uniform.
3643    """
3644    from Scientific.IO.NetCDF import NetCDFFile
3645
3646    from anuga.coordinate_transforms.redfearn import redfearn
3647
    precision = Float # So if we want to change the precision it's done here
3649
3650    # go in to the bath dir and load the only file,
3651    bath_files = os.listdir(bath_dir)
3652
3653    bath_file = bath_files[0]
3654    bath_dir_file =  bath_dir + os.sep + bath_file
3655    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3656
3657    #Use the date.time of the bath file as a basis for
3658    #the start time for other files
3659    base_start = bath_file[-12:]
3660
3661    #go into the elevation dir and load the 000 file
3662    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3663                         + base_start
3664
3665    elevation_files = os.listdir(elevation_dir)
3666    ucur_files = os.listdir(ucur_dir)
3667    vcur_files = os.listdir(vcur_dir)
3668    elevation_files.sort()
3669    # the first elevation file should be the
3670    # file with the same base name as the bath data
    assert elevation_files[0] == elevation_prefix + base_start
3672
3673    number_of_latitudes = bath_grid.shape[0]
3674    number_of_longitudes = bath_grid.shape[1]
3675    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3676
3677    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3678                  for x in range(number_of_longitudes)]
3679    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3680                 for y in range(number_of_latitudes)]
3681
    # reverse order of lat, so the first lat represents the first grid row
3683    latitudes.reverse()
3684
3685    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
3686                                                 minlat=minlat, maxlat=maxlat,
3687                                                 minlon=minlon, maxlon=maxlon)
3688
3689
3690    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3691    latitudes = latitudes[kmin:kmax]
3692    longitudes = longitudes[lmin:lmax]
3693    number_of_latitudes = len(latitudes)
3694    number_of_longitudes = len(longitudes)
3695    number_of_times = len(os.listdir(elevation_dir))
3696    number_of_points = number_of_latitudes*number_of_longitudes
3697    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3698
3699    #Work out the times
3700    if len(elevation_files) > 1:
3701        # Assume: The time period is less than 24hrs.
3702        time_period = (int(elevation_files[1][-3:]) - \
3703                      int(elevation_files[0][-3:]))*60*60
3704        times = [x*time_period for x in range(len(elevation_files))]
3705    else:
3706        times = [0.0]
3707
3708
3709    if verbose:
3710        print '------------------------------------------------'
3711        print 'Statistics:'
3712        print '  Extent (lat/lon):'
3713        print '    lat in [%f, %f], len(lat) == %d'\
3714              %(min(latitudes), max(latitudes),
3715                len(latitudes))
3716        print '    lon in [%f, %f], len(lon) == %d'\
3717              %(min(longitudes), max(longitudes),
3718                len(longitudes))
3719        print '    t in [%f, %f], len(t) == %d'\
3720              %(min(times), max(times), len(times))
3721
3722    ######### WRITE THE SWW FILE #############
3723    # NetCDF file definition
3724    outfile = NetCDFFile(sww_file, 'w')
3725
3726    #Create new file
3727    outfile.institution = 'Geoscience Australia'
3728    outfile.description = 'Converted from XXX'
3729
3730
3731    #For sww compatibility
3732    outfile.smoothing = 'Yes'
3733    outfile.order = 1
3734
3735    #Start time in seconds since the epoch (midnight 1/1/1970)
3736    outfile.starttime = starttime = times[0]
3737
3738
3739    # dimension definitions
3740    outfile.createDimension('number_of_volumes', number_of_volumes)
3741
3742    outfile.createDimension('number_of_vertices', 3)
3743    outfile.createDimension('number_of_points', number_of_points)
3744    outfile.createDimension('number_of_timesteps', number_of_times)
3745
3746    # variable definitions
3747    outfile.createVariable('x', precision, ('number_of_points',))
3748    outfile.createVariable('y', precision, ('number_of_points',))
3749    outfile.createVariable('elevation', precision, ('number_of_points',))
3750
3751    #FIXME: Backwards compatibility
3752    outfile.createVariable('z', precision, ('number_of_points',))
3753    #################################
3754
3755    outfile.createVariable('volumes', Int, ('number_of_volumes',
3756                                            'number_of_vertices'))
3757
3758    outfile.createVariable('time', precision,
3759                           ('number_of_timesteps',))
3760
3761    outfile.createVariable('stage', precision,
3762                           ('number_of_timesteps',
3763                            'number_of_points'))
3764
3765    outfile.createVariable('xmomentum', precision,
3766                           ('number_of_timesteps',
3767                            'number_of_points'))
3768
3769    outfile.createVariable('ymomentum', precision,
3770                           ('number_of_timesteps',
3771                            'number_of_points'))
3772
3773    #Store
3774    from anuga.coordinate_transforms.redfearn import redfearn
3775    x = zeros(number_of_points, Float)  #Easting
3776    y = zeros(number_of_points, Float)  #Northing
3777
3778    if verbose: print 'Making triangular grid'
3779    #Get zone of 1st point.
3780    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3781
3782    vertices = {}
3783    i = 0
3784    for k, lat in enumerate(latitudes):
3785        for l, lon in enumerate(longitudes):
3786
3787            vertices[l,k] = i
3788
3789            zone, easting, northing = redfearn(lat,lon)
3790
            msg = 'Zone boundary crossed at longitude = %s' %lon
3792            #assert zone == refzone, msg
3793            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3794            x[i] = easting
3795            y[i] = northing
3796            i += 1
3797
3798
3799    #Construct 2 triangles per 'rectangular' element
3800    volumes = []
3801    for l in range(number_of_longitudes-1):    #X direction
3802        for k in range(number_of_latitudes-1): #Y direction
3803            v1 = vertices[l,k+1]
3804            v2 = vertices[l,k]
3805            v3 = vertices[l+1,k+1]
3806            v4 = vertices[l+1,k]
3807
            #Note, this is different to the ferret2sww code
3809            #since the order of the lats is reversed.
3810            volumes.append([v1,v3,v2]) #Upper element
3811            volumes.append([v4,v2,v3]) #Lower element
3812
3813    volumes = array(volumes)
3814
3815    geo_ref = Geo_reference(refzone,min(x),min(y))
3816    geo_ref.write_NetCDF(outfile)
3817
3818    # This will put the geo ref in the middle
3819    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3820
3821
3822    if verbose:
3823        print '------------------------------------------------'
3824        print 'More Statistics:'
        print '  Extent (x/y):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y),
                len(y))
3832        print 'geo_ref: ',geo_ref
3833
3834    z = resize(bath_grid,outfile.variables['z'][:].shape)
3835    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3836    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3837    outfile.variables['z'][:] = z
3838    outfile.variables['elevation'][:] = z  #FIXME HACK
3839    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3840
3841    stage = outfile.variables['stage']
3842    xmomentum = outfile.variables['xmomentum']
3843    ymomentum = outfile.variables['ymomentum']
3844
3845    outfile.variables['time'][:] = times   #Store time relative
3846
3847    if verbose: print 'Converting quantities'
3848    n = number_of_times
3849    for j in range(number_of_times):
3850        # load in files
3851        elevation_meta, elevation_grid = \
3852           _read_asc(elevation_dir + os.sep + elevation_files[j])
3853
3854        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3855        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3856
3857        #cut matrix to desired size
3858        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3859        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3860        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3861       
3862        # handle missing values
3863        missing = (elevation_grid == elevation_meta['NODATA_value'])
3864        if sometrue (missing):
3865            if fail_on_NaN:
3866                msg = 'File %s contains missing values'\
3867                      %(elevation_files[j])
3868                raise DataMissingValuesError, msg
3869            else:
3870                elevation_grid = elevation_grid*(missing==0) + \
3871                                 missing*elevation_NaN_filler
3872
3873
3874        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3875        i = 0
3876        for k in range(number_of_latitudes):      #Y direction
3877            for l in range(number_of_longitudes): #X direction
3878                w = zscale*elevation_grid[k,l] + mean_stage
3879                stage[j,i] = w
3880                h = w - z[i]
3881                xmomentum[j,i] = u_momentum_grid[k,l]*h
3882                ymomentum[j,i] = v_momentum_grid[k,l]*h
3883                i += 1
3884    outfile.close()
3885
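# A usage sketch (hypothetical directory and file names):
#
#     asc_csiro2sww('bathymetry_dir', 'elevation_dir', 'ucur_dir',
#                   'vcur_dir', 'boundary.sww',
#                   minlat=-35.5, maxlat=-34.5,
#                   minlon=150.0, maxlon=151.0,
#                   verbose=True)
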
3886def _get_min_max_indexes(latitudes_ref,longitudes_ref,
3887                        minlat=None, maxlat=None,
3888                        minlon=None, maxlon=None):
    """
    Return min, max indexes (for slicing) of the lat and long arrays to
    cover the area specified with min/max lat/long.

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assert longitudes are sorted,
    long - from low to high (west to east, eg 148 - 151)
    Assert latitudes are sorted, ascending or descending
    """
3902    latitudes = latitudes_ref[:]
3903    longitudes = longitudes_ref[:]
3904
3905    latitudes = ensure_numeric(latitudes)
3906    longitudes = ensure_numeric(longitudes)
3907   
3908    assert allclose(sort(longitudes), longitudes)
3909   
3910    lat_ascending = True
3911    if not allclose(sort(latitudes), latitudes):
3912        lat_ascending = False
3913        # reverse order of lat, so it's in ascending order         
3914        latitudes = latitudes[::-1]
3915        assert allclose(sort(latitudes), latitudes)
3916    #print "latitudes  in funct", latitudes
3917   
3918    largest_lat_index = len(latitudes)-1
3919    #Cut out a smaller extent.
3920    if minlat == None:
3921        lat_min_index = 0
3922    else:
3923        lat_min_index = searchsorted(latitudes, minlat)-1
3924        if lat_min_index <0:
3925            lat_min_index = 0
3926
3927
3928    if maxlat == None:
3929        lat_max_index = largest_lat_index #len(latitudes)
3930    else:
3931        lat_max_index = searchsorted(latitudes, maxlat)
3932        if lat_max_index > largest_lat_index:
3933            lat_max_index = largest_lat_index
3934
3935    if minlon == None:
3936        lon_min_index = 0
3937    else:
3938        lon_min_index = searchsorted(longitudes, minlon)-1
3939        if lon_min_index <0:
3940            lon_min_index = 0
3941
3942    if maxlon == None:
3943        lon_max_index = len(longitudes)
3944    else:
3945        lon_max_index = searchsorted(longitudes, maxlon)
3946
    # Reversing the indexes, if the lat array is descending
3948    if lat_ascending is False:
3949        lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3950                                       largest_lat_index - lat_min_index
3951    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3952    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3953
3954    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3955
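# Example: for ascending latitudes [-36, -35, -34, -33] with
# minlat=-34.5 and maxlat=-33.5, the returned latitude indexes are
# (1, 4), i.e. the slice [-35, -34, -33] -- just big enough to cover
# the requested range.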
3956
3957def _read_asc(filename, verbose=False):
3958    """Read esri file from the following ASCII format (.asc)
3959
3960    Example:
3961
3962    ncols         3121
3963    nrows         1800
3964    xllcorner     722000
3965    yllcorner     5893000
3966    cellsize      25
3967    NODATA_value  -9999
3968    138.3698 137.4194 136.5062 135.5558 ..........
3969
3970    """
3971
3972    datafile = open(filename)
3973
3974    if verbose: print 'Reading DEM from %s' %(filename)
3975    lines = datafile.readlines()
3976    datafile.close()
3977
3978    if verbose: print 'Got', len(lines), ' lines'
3979
3980    ncols = int(lines.pop(0).split()[1].strip())
3981    nrows = int(lines.pop(0).split()[1].strip())
3982    xllcorner = float(lines.pop(0).split()[1].strip())
3983    yllcorner = float(lines.pop(0).split()[1].strip())
3984    cellsize = float(lines.pop(0).split()[1].strip())
3985    NODATA_value = float(lines.pop(0).split()[1].strip())
3986
3987    assert len(lines) == nrows
3988
3989    #Store data
3990    grid = []
3991
3992    n = len(lines)
3993    for i, line in enumerate(lines):
3994        cells = line.split()
3995        assert len(cells) == ncols
3996        grid.append(array([float(x) for x in cells]))
3997    grid = array(grid)
3998
3999    return {'xllcorner':xllcorner,
4000            'yllcorner':yllcorner,
4001            'cellsize':cellsize,
4002            'NODATA_value':NODATA_value}, grid
4003
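# Example (hypothetical file name):
#
#     metadata, grid = _read_asc('dem.asc')
#     cellsize = metadata['cellsize']        # grid spacing
#     nodata = metadata['NODATA_value']
#
# grid is a (nrows, ncols) Numeric array of the cell values.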
4004
4005
4006    ####  URS 2 SWW  ###
4007
4008lon_name = 'LON'
4009lat_name = 'LAT'
4010time_name = 'TIME'
precision = Float # So if we want to change the precision it's done here
4012class Write_nc:
4013    """
4014    Write an nc file.
4015   
4016    Note, this should be checked to meet cdc netcdf conventions for gridded
4017    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4018   
4019    """
4020    def __init__(self,
4021                 quantity_name,
4022                 file_name,
4023                 time_step_count,
4024                 time_step,
4025                 lon,
4026                 lat):
4027        """
4028        time_step_count is the number of time steps.
4029        time_step is the time step size
4030       
        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
4032        """
4033        self.quantity_name = quantity_name
4034        quantity_units = {'HA':'CENTIMETERS',
4035                              'UA':'CENTIMETERS/SECOND',
4036                              'VA':'CENTIMETERS/SECOND'}       
4037       
4038        multiplier_dic = {'HA':100.0, # To convert from m to cm
4039                              'UA':100.0,  #  m/s to cm/sec
                              'VA':-100.0}  # MUX files have positive x in the
4041        # Southern direction.  This corrects for it, when writing nc files.
4042       
4043        self.quantity_multiplier =  multiplier_dic[self.quantity_name]
4044       
4045        #self.file_name = file_name
4046        self.time_step_count = time_step_count
4047        self.time_step = time_step
4048
4049        # NetCDF file definition
4050        self.outfile = NetCDFFile(file_name, 'w')
4051        outfile = self.outfile       
4052
4053        #Create new file
4054        nc_lon_lat_header(outfile, lon, lat)
4055   
4056        # TIME
4057        outfile.createDimension(time_name, None)
4058        outfile.createVariable(time_name, precision, (time_name,))
4059
4060        #QUANTITY
4061        outfile.createVariable(self.quantity_name, precision,
4062                               (time_name, lat_name, lon_name))
4063        outfile.variables[self.quantity_name].missing_value=-1.e+034
4064        outfile.variables[self.quantity_name].units= \
4065                                 quantity_units[self.quantity_name]
4066        outfile.variables[lon_name][:]= ensure_numeric(lon)
4067        outfile.variables[lat_name][:]= ensure_numeric(lat)
4068
4069        #Assume no one will be wanting to read this, while we are writing
4070        #outfile.close()
4071       
4072    def store_timestep(self, quantity_slice):
4073        """
4074        Write a time slice of quantity info
4075        quantity_slice is the data to be stored at this time step
4076        """
4077       
4078        outfile = self.outfile
4079       
4080        # Get the variables
4081        time = outfile.variables[time_name]
4082        quantity = outfile.variables[self.quantity_name]
4083           
4084        i = len(time)
4085
4086        #Store time
4087        time[i] = i*self.time_step #self.domain.time
4088        quantity[i,:] = quantity_slice* self.quantity_multiplier
4089       
4090    def close(self):
4091        self.outfile.close()
4092
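# A minimal usage sketch (hypothetical file name and values): write a
# two-timestep 'HA' file for a 2x3 lat/long grid.  Heights are supplied
# in metres; the HA multiplier converts them to centimetres.
#
#     from Numeric import ones, Float
#     nc = Write_nc('HA', 'o_ha.nc', time_step_count=2, time_step=0.5,
#                   lon=[150.0, 150.5, 151.0], lat=[-35.0, -34.5])
#     nc.store_timestep(ones((2, 3), Float))      # slice shape (lat, lon)
#     nc.store_timestep(2.0 * ones((2, 3), Float))
#     nc.close()
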
4093def urs2sww(basename_in='o', basename_out=None, verbose=False,
4094            remove_nc_files=True,
4095            minlat=None, maxlat=None,
4096            minlon= None, maxlon=None,
4097            mint=None, maxt=None,
4098            mean_stage=0,
4099            origin = None,
4100            zscale=1,
4101            fail_on_NaN=True,
4102            NaN_filler=0,
4103            elevation=None):
4104    """
4105    Convert URS C binary format for wave propagation to
4106    sww format native to abstract_2d_finite_volumes.
4107
    Specify only basename_in and read files of the form
    basefilename_waveheight-z-mux, basefilename_velocity-e-mux and
    basefilename_velocity-n-mux containing relative height,
    x-velocity and y-velocity, respectively.
4112
4113    Also convert latitude and longitude to UTM. All coordinates are
4114    assumed to be given in the GDA94 datum. The latitude and longitude
4115    information is for  a grid.
4116
    min's and max's: If omitted - full extent is used.
4118    To include a value min may equal it, while max must exceed it.
4119    Lat and lon are assumed to be in decimal degrees.
4120    NOTE: minlon is the most east boundary.
4121   
4122    origin is a 3-tuple with geo referenced
4123    UTM coordinates (zone, easting, northing)
4124    It will be the origin of the sww file. This shouldn't be used,
    since all of anuga should be able to handle an arbitrary origin.
4126
4127
    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    which means that latitude is the fastest
    varying dimension (row major order, so to speak)

    In URS C binary the latitudes and longitudes are in ascending order.
4133    """
4134    if basename_out == None:
4135        basename_out = basename_in
4136    files_out = urs2nc(basename_in, basename_out)
4137    ferret2sww(basename_out,
4138               minlat=minlat,
4139               maxlat=maxlat,
4140               minlon=minlon,
4141               maxlon=maxlon,
4142               mint=mint,
4143               maxt=maxt,
4144               mean_stage=mean_stage,
4145               origin=origin,
4146               zscale=zscale,
4147               fail_on_NaN=fail_on_NaN,
4148               NaN_filler=NaN_filler,
4149               inverted_bathymetry=True,
4150               verbose=verbose)
4151    #print "files_out",files_out
4152    if remove_nc_files:
4153        for file_out in files_out:
4154            os.remove(file_out)
4155   
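# A usage sketch (hypothetical basename; reads o_waveheight-z-mux,
# o_velocity-e-mux and o_velocity-n-mux via urs2nc):
#
#     urs2sww(basename_in='o', basename_out='boundary',
#             minlat=-35.5, maxlat=-34.5, minlon=150.0, maxlon=151.0,
#             mean_stage=0.0, verbose=True)
#
# This writes boundary.sww and removes the intermediate nc files.
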
def urs2nc(basename_in = 'o', basename_out = 'urs'):
    """
    Convert the 3 urs files to 4 nc files.

    The urs file names must be:
    [basename_in]_waveheight-z-mux
    [basename_in]_velocity-e-mux
    [basename_in]_velocity-n-mux
    """
4166   
4167    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4168                basename_in + EAST_VELOCITY_LABEL,
4169                basename_in + NORTH_VELOCITY_LABEL]
4170    files_out = [basename_out+'_ha.nc',
4171                 basename_out+'_ua.nc',
4172                 basename_out+'_va.nc']
4173    quantities = ['HA','UA','VA']
4174
4175    #if os.access(files_in[0]+'.mux', os.F_OK) == 0 :
4176    for i, file_name in enumerate(files_in):
4177        if os.access(file_name, os.F_OK) == 0:
4178            if os.access(file_name+'.mux', os.F_OK) == 0 :
4179                msg = 'File %s does not exist or is not accessible' %file_name
4180                raise IOError, msg
4181            else:
4182               files_in[i] += '.mux'
4183               print "file_name", file_name
4184    hashed_elevation = None
4185    for file_in, file_out, quantity in map(None, files_in,
4186                                           files_out,
4187                                           quantities):
4188        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
4189                                         file_out,
4190                                         quantity)
4191        #print "lonlatdep", lonlatdep
4192        if hashed_elevation == None:
4193            elevation_file = basename_out+'_e.nc'
4194            write_elevation_nc(elevation_file,
4195                                lon,
4196                                lat,
4197                                depth)
4198            hashed_elevation = myhash(lonlatdep)
4199        else:
4200            msg = "The elevation information in the mux files is inconsistent"
4201            assert hashed_elevation == myhash(lonlatdep), msg
4202    files_out.append(elevation_file)
4203    return files_out
4204   
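# Example (hypothetical basenames; expects o_waveheight-z-mux,
# o_velocity-e-mux and o_velocity-n-mux, with or without a further
# '.mux' extension):
#
#     files = urs2nc(basename_in='o', basename_out='urs')
#
# This writes urs_ha.nc, urs_ua.nc, urs_va.nc and urs_e.nc and returns
# the four file names.
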
4205def _binary_c2nc(file_in, file_out, quantity):
4206    """
4207    Reads in a quantity urs file and writes a quantity nc file.
4208    additionally, returns the depth and lat, long info,
4209    so it can be written to a file.
4210    """
4211    columns = 3 # long, lat , depth
4212    mux_file = open(file_in, 'rb')
4213   
4214    # Number of points/stations
4215    (points_num,)= unpack('i',mux_file.read(4))
4216
4217    # nt, int - Number of time steps
4218    (time_step_count,)= unpack('i',mux_file.read(4))
4219
4220    #dt, float - time step, seconds
4221    (time_step,) = unpack('f', mux_file.read(4))
4222   
4223    msg = "Bad data in the mux file."
4224    if points_num < 0:
4225        mux_file.close()
4226        raise ANUGAError, msg
4227    if time_step_count < 0:
4228        mux_file.close()
4229        raise ANUGAError, msg
4230    if time_step < 0:
4231        mux_file.close()
4232        raise ANUGAError, msg
4233   
4234    lonlatdep = p_array.array('f')
4235    lonlatdep.read(mux_file, columns * points_num)
4236    lonlatdep = array(lonlatdep, typecode=Float)   
4237    lonlatdep = reshape(lonlatdep, (points_num, columns))
4238   
4239    lon, lat, depth = lon_lat2grid(lonlatdep)
4240    lon_sorted = list(lon)
4241    lon_sorted.sort()
4242
    if not lon == lon_sorted:
        msg = "Longitudes in mux file are not in ascending order"
        raise IOError, msg
    lat_sorted = list(lat)
    lat_sorted.sort()

    if not lat == lat_sorted:
        msg = "Latitudes in mux file are not in ascending order"
        raise IOError, msg
4251   
4252    nc_file = Write_nc(quantity,
4253                       file_out,
4254                       time_step_count,
4255                       time_step,
4256                       lon,
4257                       lat)
4258
4259    for i in range(time_step_count):
4260        #Read in a time slice  from mux file 
4261        hz_p_array = p_array.array('f')
4262        hz_p_array.read(mux_file, points_num)
4263        hz_p = array(hz_p_array, typecode=Float)
4264        hz_p = reshape(hz_p, (len(lon), len(lat)))
4265        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
4266
4267        #write time slice to nc file
4268        nc_file.store_timestep(hz_p)
4269    mux_file.close()
4270    nc_file.close()
4271
4272    return lonlatdep, lon, lat, depth
4273   
4274
4275def write_elevation_nc(file_out, lon, lat, depth_vector):
4276    """
4277    Write an nc elevation file.
4278    """
4279   
4280    # NetCDF file definition
4281    outfile = NetCDFFile(file_out, 'w')
4282
4283    #Create new file
4284    nc_lon_lat_header(outfile, lon, lat)
4285   
4286    # ELEVATION
4287    zname = 'ELEVATION'
4288    outfile.createVariable(zname, precision, (lat_name, lon_name))
4289    outfile.variables[zname].units='CENTIMETERS'
4290    outfile.variables[zname].missing_value=-1.e+034
4291
4292    outfile.variables[lon_name][:]= ensure_numeric(lon)
4293    outfile.variables[lat_name][:]= ensure_numeric(lat)
4294
4295    depth = reshape(depth_vector, ( len(lat), len(lon)))
4296    outfile.variables[zname][:]= depth
4297   
4298    outfile.close()
4299   
4300def nc_lon_lat_header(outfile, lon, lat):
4301    """
4302    outfile is the netcdf file handle.
4303    lon - a list/array of the longitudes
4304    lat - a list/array of the latitudes
4305    """
4306   
4307    outfile.institution = 'Geoscience Australia'
4308    outfile.description = 'Converted from URS binary C'
4309   
4310    # Longitude
4311    outfile.createDimension(lon_name, len(lon))
4312    outfile.createVariable(lon_name, precision, (lon_name,))
4313    outfile.variables[lon_name].point_spacing='uneven'
4314    outfile.variables[lon_name].units='degrees_east'
4315    outfile.variables[lon_name].assignValue(lon)
4316
4317
4318    # Latitude
4319    outfile.createDimension(lat_name, len(lat))
4320    outfile.createVariable(lat_name, precision, (lat_name,))
4321    outfile.variables[lat_name].point_spacing='uneven'
4322    outfile.variables[lat_name].units='degrees_north'
4323    outfile.variables[lat_name].assignValue(lat)
4324
4325
4326   
4327def lon_lat2grid(long_lat_dep):
    """
    Given a list of points that are assumed to be on a grid,
    return the long's and lat's of the grid.
4331    long_lat_dep is an array where each row is a position.
4332    The first column is longitudes.
4333    The second column is latitudes.
4334
4335    The latitude is the fastest varying dimension - in mux files
4336    """
4337    LONG = 0
4338    LAT = 1
4339    QUANTITY = 2
4340
4341    long_lat_dep = ensure_numeric(long_lat_dep, Float)
4342   
4343    num_points = long_lat_dep.shape[0]
4344    this_rows_long = long_lat_dep[0,LONG]
4345
    # Count the length of unique latitudes
    i = 0
    # Test the index bound first to avoid an IndexError when all
    # longitudes are equal.
    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
        i += 1
    # determine the lats and longs from the grid
    lat = long_lat_dep[:i, LAT]
    long = long_lat_dep[::i, LONG]
4353   
4354    lenlong = len(long)
4355    lenlat = len(lat)
4356    #print 'len lat', lat, len(lat)
4357    #print 'len long', long, len(long)
4358         
4359    msg = 'Input data is not gridded'     
4360    assert num_points % lenlat == 0, msg
4361    assert num_points % lenlong == 0, msg
4362         
4363    # Test that data is gridded       
4364    for i in range(lenlong):
4365        msg = 'Data is not gridded.  It must be for this operation'
4366        first = i*lenlat
4367        last = first + lenlat
4368               
4369        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4370        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
4371   
4372   
4373#    print 'range long', min(long), max(long)
4374#    print 'range lat', min(lat), max(lat)
4375#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4376#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4377   
4378   
4379   
4380    msg = 'Out of range latitudes/longitudes'
4381    for l in lat:assert -90 < l < 90 , msg
4382    for l in long:assert -180 < l < 180 , msg
4383
4384    # Changing quantity from lat being the fastest varying dimension to
4385    # long being the fastest varying dimension
4386    # FIXME - make this faster/do this a better way
4387    # use numeric transpose, after reshaping the quantity vector
4388#    quantity = zeros(len(long_lat_dep), Float)
4389    quantity = zeros(num_points, Float)
4390   
4391#    print 'num',num_points
4392    for lat_i, _ in enumerate(lat):
4393        for long_i, _ in enumerate(long):
4394            q_index = lat_i*lenlong+long_i
4395            lld_index = long_i*lenlat+lat_i
4396#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4397            temp = long_lat_dep[lld_index, QUANTITY]
4398            quantity[q_index] = temp
4399           
4400    return long, lat, quantity
4401
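# Example: four gridded points with latitude varying fastest,
#
#     long, lat, quantity = lon_lat2grid([[150.0, -35.0, 1.0],
#                                         [150.0, -34.0, 2.0],
#                                         [151.0, -35.0, 3.0],
#                                         [151.0, -34.0, 4.0]])
#
# gives long == [150.0, 151.0], lat == [-35.0, -34.0] and the quantity
# reordered to [1.0, 3.0, 2.0, 4.0], i.e. longitude varying fastest.
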
4402    ####  END URS 2 SWW  ###
4403
4404    #### URS UNGRIDDED 2 SWW ###
4405
4406    ### PRODUCING THE POINTS NEEDED FILE ###
4407LL_LAT = -50.0
4408LL_LONG = 80.0
4409GRID_SPACING = 1.0/60.0
4410LAT_AMOUNT = 4800
4411LONG_AMOUNT = 3600
4412def URS_points_needed_to_file(file_name, boundary_polygon, zone,
4413                              ll_lat=LL_LAT, ll_long=LL_LONG,
4414                              grid_spacing=GRID_SPACING, 
4415                              lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT,
4416                              export_csv=False, use_cache=False,
4417                              verbose=False):
4418    """
4419    file_name - name of the urs file produced for David.
4420    boundary_polygon - a list of points that describes a polygon.
                      The last point is assumed to join the first point.
                      This is in UTM (lat long would be better though)

    ll_lat - lower left latitude, in decimal degrees
    ll_long - lower left longitude, in decimal degrees
    grid_spacing - in decimal degrees
4427
4428
4429    Don't add the file extension.  It will be added.
4430    """
4431    geo = URS_points_needed(boundary_polygon, zone, ll_lat, ll_long,
4432                            grid_spacing, 
4433                      lat_amount, long_amount,use_cache, verbose)
4434    if not file_name[-4:] == ".urs":
4435        file_name += ".urs"
4436    geo.export_points_file(file_name)
4437    if export_csv:
4438        if file_name[-4:] == ".urs":
4439            file_name = file_name[:-4] + ".csv"
4440        geo.export_points_file(file_name)
4441
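# A usage sketch (hypothetical polygon in UTM zone 56):
#
#     polygon = [[398000, 6368000], [398000, 6370000], [400000, 6370000]]
#     URS_points_needed_to_file('urs_boundary', polygon, 56,
#                               export_csv=True, verbose=True)
#
# This writes urs_boundary.urs (and urs_boundary.csv) listing the
# lat/long grid points needed to interpolate along the polygon's edges.
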
4442def URS_points_needed(boundary_polygon, zone, ll_lat=LL_LAT,
4443                      ll_long=LL_LONG, grid_spacing=GRID_SPACING, 
4444                      lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT,
4445                      use_cache=False, verbose=False):
4446    args = (boundary_polygon,
4447                      zone)
4448    kwargs = {'ll_lat': ll_lat,
4449              'll_long': ll_long,
4450              'grid_spacing': grid_spacing,
4451              'lat_amount': lat_amount,
4452              'long_amount': long_amount} 
4453    if use_cache is True:
4454        try:
4455            from anuga.caching import cache
4456        except:
            msg = 'Caching was requested, but caching module '+\
                  'could not be imported'
            raise ImportError, msg
4460
4461
4462        geo = cache(_URS_points_needed,
4463                  args, kwargs,
4464                  verbose=verbose,
4465                  compression=False)
4466    else:
4467        #I was getting 'got multiple values for keyword argument' errors
4468        #geo = apply(_URS_points_needed, args, kwargs)
4469        geo = _URS_points_needed(boundary_polygon,
4470                      zone, ll_lat,
4471                      ll_long, grid_spacing, 
4472                      lat_amount, long_amount)
4473
4474    return geo   
4475def _URS_points_needed(boundary_polygon,
4476                      zone, ll_lat=LL_LAT,
4477                      ll_long=LL_LONG, grid_spacing=GRID_SPACING, 
4478                      lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT):
4479    """
4480
4481    boundary_polygon - a list of points that describes a polygon.
4482                      The last point is assumed ot join the first point.
                      The last point is assumed to join the first point.
                      This is in UTM (lat long would be better though)

    ll_lat - lower left latitude, in decimal degrees
    ll_long - lower left longitude, in decimal degrees
    grid_spacing - in decimal degrees
4489    """
4490   
4491    from sets import ImmutableSet
4492   
    msg = "grid_spacing can not be zero"
    assert grid_spacing != 0, msg
4495    a = boundary_polygon
4496    # List of segments.  Each segment is two points.
4497    segs = [i and [a[i-1], a[i]] or [a[len(a)-1], a[0]] for i in range(len(a))]
4498
4499    # convert the segs to Lat's and longs.
4500   
4501    # Don't assume the zone of the segments is the same as the lower left
4502    # corner of the lat long data!!  They can easily be in different zones
4503   
4504    lat_long_set = ImmutableSet()
4505    for seg in segs:
4506        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing, 
4507                      lat_amount, long_amount, zone)
4508        lat_long_set |= ImmutableSet(points_lat_long)
4509    #print "lat_long_set",lat_long_set
4510    geo = Geospatial_data(data_points=list(lat_long_set),
4511                              points_are_lats_longs=True)
4512    return geo
4513   
4514def points_needed(seg, ll_lat, ll_long, grid_spacing, 
4515                  lat_amount, long_amount, zone):
4516    """
4517    return a list of the points, in lats and longs that are needed to
4518    interpolate any point on the segment.
4519    """
4520    from math import sqrt
4521    #print "zone",zone
4522    geo_reference = Geo_reference(zone=zone)
4523    #print "seg",seg
4524    geo = Geospatial_data(seg,geo_reference=geo_reference)
4525    seg_lat_long = geo.get_data_points(as_lat_long=True)
4526    #print "seg_lat_long", seg_lat_long
4527    # 1.415 = 2^0.5, rounded up....
4528    sqrt_2_rounded_up = 1.415
4529    buffer = sqrt_2_rounded_up * grid_spacing
4530   
4531    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
4532    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
4533    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
4534    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer
4535
4536    first_row = (min_long - ll_long)/grid_spacing
4537    # To round up
4538    first_row_long = int(round(first_row + 0.5))
4539    #print "first_row", first_row_long
4540
4541    last_row = (max_long - ll_long)/grid_spacing # round down
4542    last_row_long = int(round(last_row))
    #print "last_row", last_row_long
4544   
4545    first_row = (min_lat - ll_lat)/grid_spacing
4546    # To round up
4547    first_row_lat = int(round(first_row + 0.5))
4548    #print "first_row", first_row_lat
4549
4550    last_row = (max_lat - ll_lat)/grid_spacing # round down
4551    last_row_lat = int(round(last_row))
4552    #print "last_row",last_row_lat
4553
    # to work out the max distance -
    # 111120 m - horizontal distance spanned by 1 degree of latitude.
    #max_distance = sqrt_2_rounded_up * 111120 * grid_spacing
    max_distance = 157147.4112 * grid_spacing # sqrt(2) * 111120 * grid_spacing
4558    #print "max_distance", max_distance #2619.12 m for 1 minute
4559    points_lat_long = []
4560    # Create a list of the lat long points to include.
4561    for index_lat in range(first_row_lat, last_row_lat + 1):
4562        for index_long in range(first_row_long, last_row_long + 1):
4563            lat = ll_lat + index_lat*grid_spacing
4564            long = ll_long + index_long*grid_spacing
4565
4566            #filter here to keep good points
4567            if keep_point(lat, long, seg, max_distance):
4568                points_lat_long.append((lat, long)) #must be hashable
4569    #print "points_lat_long", points_lat_long
4570
4571    # Now that we have these points, lets throw ones out that are too far away
4572    return points_lat_long
4573
4574def keep_point(lat, long, seg, max_distance):
4575    """
4576    seg is two points, UTM
4577    """
4578    from math import sqrt
4579    _ , x0, y0 = redfearn(lat, long)
4580    x1 = seg[0][0]
4581    y1 = seg[0][1]
4582    x2 = seg[1][0]
4583    y2 = seg[1][1]
4584
4585    x2_1 = x2-x1
4586    y2_1 = y2-y1
4587    d = abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1))/sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1))
4588    if d <= max_distance:
4589        return True
4590    else:
4591        return False
4592   
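# Note: keep_point computes the perpendicular distance from the grid point
# to the (infinite) line through the UTM segment seg and keeps the point
# iff that distance does not exceed max_distance.
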
4593    #### CONVERTING UNGRIDDED URS DATA TO AN SWW FILE ####
4594   
4595WAVEHEIGHT_MUX_LABEL = '_waveheight-z-mux'
4596EAST_VELOCITY_LABEL =  '_velocity-e-mux'
4597NORTH_VELOCITY_LABEL =  '_velocity-n-mux' 
4598def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
4599                      mint=None, maxt=None,
4600                      mean_stage=0,
4601                      origin=None,
4602                      hole_points_UTM=None,
4603                      zscale=1):
4604    """   
4605    Convert URS C binary format for wave propagation to
4606    sww format native to abstract_2d_finite_volumes.
4607
4608
    Specify only basename_in and read files of the form
    basefilename_waveheight-z-mux, basefilename_velocity-e-mux and
    basefilename_velocity-n-mux containing relative height,
    x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is assumed to be ungridded.

    min's and max's: If omitted - full extent is used.
    To include a value min and max may equal it.
4620    Lat and lon are assumed to be in decimal degrees.
4621   
4622    origin is a 3-tuple with geo referenced
4623    UTM coordinates (zone, easting, northing)
4624    It will be the origin of the sww file. This shouldn't be used,
    since all of anuga should be able to handle an arbitrary origin.
4626    The mux point info is NOT relative to this origin.
4627
4628
    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    which means that latitude is the fastest
    varying dimension (row major order, so to speak)

    In URS C binary the latitudes and longitudes are in ascending order.
4634
4635    Note, interpolations of the resulting sww file will be different
4636    from results of urs2sww.  This is due to the interpolation
4637    function used, and the different grid structure between urs2sww
4638    and this function.
4639   
4640    Interpolating data that has an underlying gridded source can
4641    easily end up with different values, depending on the underlying
4642    mesh.
4643
4644    consider these 4 points
4645    50  -50
4646
4647    0     0
4648
4649    The grid can be
4650     -
4651    |\|    A
4652     -
4653     or;
4654      -
4655     |/|   B
4656      -
      If a point is just below the midpoint of the square, it will have a
      +ve value in grid A and a -ve value in grid B.
4659    """ 
4660    from anuga.mesh_engine.mesh_engine import NoTrianglesError
4661    from anuga.pmesh.mesh import Mesh
4662
4663    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4664                basename_in + EAST_VELOCITY_LABEL,
4665                basename_in + NORTH_VELOCITY_LABEL]
4666    quantities = ['HA','UA','VA']
4667
    # instantiate Urs_points for each of the three mux files.
4669    mux = {}
4670    for quantity, file in map(None, quantities, files_in):
4671        mux[quantity] = Urs_points(file)
4672       
4673    # Could check that the depth is the same. (hashing)
4674
4675    # handle to a mux file to do depth stuff
4676    a_mux = mux[quantities[0]]
4677   
4678    # Convert to utm
4679    lat = a_mux.lonlatdep[:,1]
4680    long = a_mux.lonlatdep[:,0]
4681    points_utm, zone = convert_from_latlon_to_utm( \
4682        latitudes=lat, longitudes=long)
4683    #print "points_utm", points_utm
4684    #print "zone", zone
4685
4686    elevation = a_mux.lonlatdep[:,2] * -1 #
4687   
4688    # grid ( create a mesh from the selected points)
    # This mesh has a problem.  Triangles are stretched over ungridded areas.
    #  If these areas could be described as holes in pmesh, that would be great

    # I can't just get the user to select a point in the middle.
4693    # A boundary is needed around these points.
4694    # But if the zone of points is obvious enough auto-segment should do
4695    # a good boundary.
4696    mesh = Mesh()
4697    mesh.add_vertices(points_utm)
4698    mesh.auto_segment(smooth_indents=True, expand_pinch=True)
4699    # To try and avoid alpha shape 'hugging' too much
4700    mesh.auto_segment( mesh.shape.get_alpha()*1.1 )
4701    if hole_points_UTM is not None:
4702        point = ensure_absolute(hole_points_UTM)
4703        mesh.add_hole(point[0], point[1])
4704    try:
4705        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4706    except NoTrianglesError:
4707        # This is a bit of a hack, going in and changing the
4708        # data structure.
4709        mesh.holes = []
4710        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4711    mesh_dic = mesh.Mesh2MeshList()
4712
4713    #mesh.export_mesh_file(basename_in + '_168.tsh')
4714    #import sys; sys.exit()
4715    # These are the times of the mux file
4716    mux_times = []
4717    for i in range(a_mux.time_step_count):
4718        mux_times.append(a_mux.time_step * i) 
4719    mux_times_start_i, mux_times_fin_i = mux2sww_time(mux_times, mint, maxt)
4720    times = mux_times[mux_times_start_i:mux_times_fin_i]
4721   
4722    if mux_times_start_i == mux_times_fin_i:
4723        # Close the mux files
4724        for quantity, file in map(None, quantities, files_in):
4725            mux[quantity].close()
4726        msg="Due to mint and maxt there's no time info in the boundary SWW."
4727        raise Exception, msg
4728       
    # If this raise is removed there are currently no downstream errors
4730           
4731    points_utm=ensure_numeric(points_utm)
    assert allclose(ensure_numeric(mesh_dic['generatedpointlist']),
                    ensure_numeric(points_utm))
4734   
4735    volumes = mesh_dic['generatedtrianglelist']
4736   
4737    # write sww intro and grid stuff.   
4738    if basename_out is None:
4739        swwname = basename_in + '.sww'
4740    else:
4741        swwname = basename_out + '.sww'
4742
4743    if verbose: print 'Output to ', swwname
4744    outfile = NetCDFFile(swwname, 'w')
4745    # For a different way of doing this, check out tsh2sww
4746    # work out sww_times and the index range this covers
4747    sww = Write_sww()
4748    sww.store_header(outfile, times, len(volumes), len(points_utm),
4749                     verbose=verbose,sww_precision=Float)
4750    outfile.mean_stage = mean_stage
4751    outfile.zscale = zscale
4752
4753    sww.store_triangulation(outfile, points_utm, volumes,
4754                            elevation, zone,  new_origin=origin,
4755                            verbose=verbose)
4756   
4757    if verbose: print 'Converting quantities'
4758    j = 0
4759    # Read in a time slice from each mux file and write it to the sww file
4760    for ha, ua, va in map(None, mux['HA'], mux['UA'], mux['VA']):
4761        if j >= mux_times_start_i and j < mux_times_fin_i:
4762            stage = zscale*ha + mean_stage
4763            h = stage - elevation
4764            xmomentum = ua*h
4765            ymomentum = -1*va*h # -1 since in mux files south is positive.
4766            sww.store_quantities(outfile, 
4767                                 slice_index=j - mux_times_start_i,
4768                                 verbose=verbose,
4769                                 stage=stage,
4770                                 xmomentum=xmomentum,
4771                                 ymomentum=ymomentum,
4772                                 sww_precision=Float)
4773        j += 1
4774    if verbose: sww.verbose_quantities(outfile)
4775    outfile.close()
4776    #
4777    # Do some conversions while writing the sww file
4778
4779   
4780def mux2sww_time(mux_times, mint, maxt):
4781    """
4782    """
4783
    if mint is None:
4785        mux_times_start_i = 0
4786    else:
4787        mux_times_start_i = searchsorted(mux_times, mint)
4788       
    if maxt is None:
4790        mux_times_fin_i = len(mux_times)
4791    else:
4792        maxt += 0.5 # so if you specify a time where there is
4793                    # data that time will be included
4794        mux_times_fin_i = searchsorted(mux_times, maxt)
4795
4796    return mux_times_start_i, mux_times_fin_i
4797
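# Example of mux2sww_time (an illustrative sketch; the numbers are
# hypothetical).  With time_step = 0.5 s and time_step_count = 5,
# mux_times == [0.0, 0.5, 1.0, 1.5, 2.0], and
#
#     start_i, fin_i = mux2sww_time(mux_times, mint=0.5, maxt=1.5)
#
# gives (start_i, fin_i) == (1, 4), i.e. times [0.5, 1.0, 1.5].
# The 0.5 s padding added to maxt ensures that a slice lying exactly
# at maxt is included.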
4798
4799class Write_sww:
4800    from anuga.shallow_water.shallow_water_domain import Domain
4801
4802    # FIXME (Ole): Hardwiring the conserved quantities like
4803    # this could be a problem. I would prefer taking them from
4804    # the instantiation of Domain.
4805    #
4806    # (DSG) There is not always a Domain instance when Write_sww is used.
4807    # Check to see if this is the same level of hardwiring as is in
    # shallow water domain.
4809   
4810    sww_quantities = Domain.conserved_quantities
4811
4812
4813    RANGE = '_range'
4814    EXTREMA = ':extrema'
4815
4816    def __init__(self):
4817        pass
4818   
4819    def store_header(self,
4820                     outfile,
4821                     times,
4822                     number_of_volumes,
4823                     number_of_points,
4824                     description='Converted from XXX',
4825                     smoothing=True,
4826                     order=1,
4827                     sww_precision=Float32,
4828                     verbose=False):
4829        """
        outfile - handle to an open NetCDF file that will be written to
4831        times - A list of the time slice times OR a start time
4832        Note, if a list is given the info will be made relative.
4833        number_of_volumes - the number of triangles
4834        """
4835   
4836        outfile.institution = 'Geoscience Australia'
4837        outfile.description = description
4838
4839        # For sww compatibility
4840        if smoothing is True:
            # Smoothing to be deprecated
4842            outfile.smoothing = 'Yes'
4843            outfile.vertices_are_stored_uniquely = 'False'
4844        else:
            # Smoothing to be deprecated
4846            outfile.smoothing = 'No'
4847            outfile.vertices_are_stored_uniquely = 'True'
4848        outfile.order = order
4849
4850        try:
4851            revision_number = get_revision_number()
4852        except:
4853            revision_number = None
4854        # Allow None to be stored as a string               
4855        outfile.revision_number = str(revision_number) 
4856
4857
4858       
4859        # times - A list or array of the time slice times OR a start time
4860        # times = ensure_numeric(times)
4861        # Start time in seconds since the epoch (midnight 1/1/1970)
4862
        # This is being used to separate a single start time from a
        # list or numeric array of time slice times.
4865        if type(times) is list or type(times) is ArrayType: 
4866            number_of_times = len(times)
4867            times = ensure_numeric(times) 
4868            if number_of_times == 0:
4869                starttime = 0
4870            else:
4871                starttime = times[0]
4872                times = times - starttime  #Store relative times
4873        else:
4874            number_of_times = 0
4875            starttime = times
4876            #times = ensure_numeric([])
4877        outfile.starttime = starttime
4878        # dimension definitions
4879        outfile.createDimension('number_of_volumes', number_of_volumes)
4880        outfile.createDimension('number_of_vertices', 3)
4881        outfile.createDimension('numbers_in_range', 2)
4882   
4883        if smoothing is True:
4884            outfile.createDimension('number_of_points', number_of_points)
4885       
            # FIXME(Ole): This will cause sww files for parallel domains to
4887            # have ghost nodes stored (but not used by triangles).
4888            # To clean this up, we have to change get_vertex_values and
4889            # friends in quantity.py (but I can't be bothered right now)
4890        else:
4891            outfile.createDimension('number_of_points', 3*number_of_volumes)
4892        outfile.createDimension('number_of_timesteps', number_of_times)
4893
4894        # variable definitions
4895        outfile.createVariable('x', sww_precision, ('number_of_points',))
4896        outfile.createVariable('y', sww_precision, ('number_of_points',))
4897        outfile.createVariable('elevation', sww_precision, ('number_of_points',))
4898        q = 'elevation'
4899        outfile.createVariable(q+Write_sww.RANGE, sww_precision,
4900                               ('numbers_in_range',))
4901
4902
4903        # Initialise ranges with small and large sentinels.
4904        # If this was in pure Python we could have used None sensibly
4905        outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
4906        outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
4907
4908        # FIXME: Backwards compatibility
4909        outfile.createVariable('z', sww_precision, ('number_of_points',))
4910        #################################
4911
4912        outfile.createVariable('volumes', Int, ('number_of_volumes',
4913                                                'number_of_vertices'))
4914        # Doing sww_precision instead of Float gives cast errors.
4915        outfile.createVariable('time', Float,
4916                               ('number_of_timesteps',))
4917       
4918        for q in Write_sww.sww_quantities:
4919            outfile.createVariable(q, sww_precision,
4920                                   ('number_of_timesteps',
4921                                    'number_of_points')) 
4922            outfile.createVariable(q+Write_sww.RANGE, sww_precision,
4923                                   ('numbers_in_range',))
4924
4925            # Initialise ranges with small and large sentinels.
4926            # If this was in pure Python we could have used None sensibly
4927            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
4928            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
4929           
4930        if type(times) is list or type(times) is ArrayType: 
4931            outfile.variables['time'][:] = times    #Store time relative
4932           
4933        if verbose:
4934            print '------------------------------------------------'
4935            print 'Statistics:'
4936            print '    t in [%f, %f], len(t) == %d'\
4937                  %(min(times.flat), max(times.flat), len(times.flat))
4938
4939       
4940    def store_triangulation(self,
4941                            outfile,
4942                            points_utm,
4943                            volumes,
4944                            elevation, zone=None, new_origin=None, 
4945                            points_georeference=None, verbose=False):
4946        """
4947       
        new_origin - a georeference that the points can be set to. (Maybe
        do this before calling this function.)
4950       
4951        points_utm - currently a list or array of the points in UTM.
4952        points_georeference - the georeference of the points_utm
4953       
4954        How about passing new_origin and current_origin.
        If you get both, do a conversion from the old to the new.
4956       
4957        If you only get new_origin, the points are absolute,
4958        convert to relative
4959       
4960        if you only get the current_origin the points are relative, store
4961        as relative.
4962       
4963        if you get no georefs create a new georef based on the minimums of
4964        points_utm.  (Another option would be to default to absolute)
4965       
4966        Yes, and this is done in another part of the code.
4967        Probably geospatial.
4968       
        If you don't supply either geo_ref, then supply a zone. If not,
        the default zone will be used.


        Precondition: store_header has been called.
        """
4977       
4978        number_of_points = len(points_utm)   
4979        volumes = array(volumes) 
4980        points_utm = array(points_utm)
4981
4982        # given the two geo_refs and the points, do the stuff
4983        # described in the method header
4984        # if this is needed else where, pull out as a function
4985        points_georeference = ensure_geo_reference(points_georeference)
4986        new_origin = ensure_geo_reference(new_origin)
4987        if new_origin is None and points_georeference is not None:
4988            points = points_utm
4989            geo_ref = points_georeference
4990        else:
4991            if new_origin is None:
4992                new_origin = Geo_reference(zone,min(points_utm[:,0]),
4993                                           min(points_utm[:,1]))
4994            points = new_origin.change_points_geo_ref(points_utm,
4995                                                      points_georeference)
4996            geo_ref = new_origin
4997
4998        # At this stage I need a georef and points
4999        # the points are relative to the georef
5000        geo_ref.write_NetCDF(outfile)
5001   
5002        # This will put the geo ref in the middle
5003        #geo_ref=Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
5004       
5005        x =  points[:,0]
5006        y =  points[:,1]
5007        z = outfile.variables['z'][:]
5008   
5009        if verbose:
5010            print '------------------------------------------------'
5011            print 'More Statistics:'
            print '  Extent:'
            print '    x in [%f, %f], len(x) == %d'\
                  %(min(x), max(x),
                    len(x))
            print '    y in [%f, %f], len(y) == %d'\
                  %(min(y), max(y),
                    len(y))
5019            print '    z in [%f, %f], len(z) == %d'\
5020                  %(min(elevation), max(elevation),
5021                    len(elevation))
5022            print 'geo_ref: ',geo_ref
5023            print '------------------------------------------------'
5024           
5025        #z = resize(bath_grid,outfile.variables['z'][:].shape)
5026        #print "points[:,0]", points[:,0]
5027        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
5028        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
5029        outfile.variables['z'][:] = elevation
5030        outfile.variables['elevation'][:] = elevation  #FIXME HACK
5031        outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
5032
5033        q = 'elevation'
5034        # This updates the _range values
5035        outfile.variables[q+Write_sww.RANGE][0] = min(elevation)
5036        outfile.variables[q+Write_sww.RANGE][1] = max(elevation)
5037
5038
5039    def store_quantities(self, outfile, sww_precision=Float32,
5040                         slice_index=None, time=None,
5041                         verbose=False, **quant):
5042        """
5043        Write the quantity info.
5044
        **quant is extra keyword arguments passed in. These must be
        the sww quantities, currently: stage, xmomentum, ymomentum.

        If the time array has already been built, use slice_index
        to specify the index.

        Otherwise, use time to extend the time dimension.
5052
5053        Maybe make this general, but the viewer assumes these quantities,
5054        so maybe we don't want it general - unless the viewer is general
5055       
        Precondition: store_triangulation and
        store_header have been called.
5059        """
5060
5061        if time is not None:
5062            file_time = outfile.variables['time']
5063            slice_index = len(file_time)
5064            file_time[slice_index] = time   
5065
5066        # Write the conserved quantities from Domain.
5067        # Typically stage,  xmomentum, ymomentum
5068        # other quantities will be ignored, silently.
5069        # Also write the ranges: stage_range,
5070        # xmomentum_range and ymomentum_range
5071        for q in Write_sww.sww_quantities:
5072            if not quant.has_key(q):
5073                msg = 'SWW file can not write quantity %s' %q
5074                raise NewQuantity, msg
5075            else:
5076                q_values = quant[q]
5077                outfile.variables[q][slice_index] = \
5078                                q_values.astype(sww_precision)
5079
5080                # This updates the _range values
5081                q_range = outfile.variables[q+Write_sww.RANGE][:]
5082                q_values_min = min(q_values)
5083                if q_values_min < q_range[0]:
5084                    outfile.variables[q+Write_sww.RANGE][0] = q_values_min
5085                q_values_max = max(q_values)
5086                if q_values_max > q_range[1]:
5087                    outfile.variables[q+Write_sww.RANGE][1] = q_values_max
5088
5089    def verbose_quantities(self, outfile):
5090        print '------------------------------------------------'
5091        print 'More Statistics:'
5092        for q in Write_sww.sww_quantities:
            print '    %s in [%f, %f]' %(q,
5094                                       outfile.variables[q+Write_sww.RANGE][0],
5095                                       outfile.variables[q+Write_sww.RANGE][1])
5096        print '------------------------------------------------'
5097
5098
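# Typical use of Write_sww (an illustrative sketch, not a tested recipe;
# times, points_utm, volumes, elevation, zone and the per-timestep
# quantity arrays are assumed to be supplied by the caller, as in
# urs_ungridded2sww above):
#
#     outfile = NetCDFFile('example.sww', 'w')
#     sww = Write_sww()
#     sww.store_header(outfile, times, len(volumes), len(points_utm))
#     sww.store_triangulation(outfile, points_utm, volumes,
#                             elevation, zone=zone)
#     for j in range(len(times)):
#         sww.store_quantities(outfile, slice_index=j,
#                              stage=stage[j],
#                              xmomentum=xmomentum[j],
#                              ymomentum=ymomentum[j])
#     outfile.close()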
5099       
5100def obsolete_write_sww_time_slices(outfile, has, uas, vas, elevation,
5101                         mean_stage=0, zscale=1,
5102                         verbose=False):   
5103    #Time stepping
5104    stage = outfile.variables['stage']
5105    xmomentum = outfile.variables['xmomentum']
5106    ymomentum = outfile.variables['ymomentum']
5107
5108    n = len(has)
5109    j=0
5110    for ha, ua, va in map(None, has, uas, vas):
5111        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
5112        w = zscale*ha + mean_stage
5113        stage[j] = w
5114        h = w - elevation
5115        xmomentum[j] = ua*h
        ymomentum[j] = -1*va*h  # -1 since in mux files south is positive.
5117        j += 1
5118   
5119def urs2txt(basename_in, location_index=None):
5120    """
5121    Not finished or tested
5122    """
5123   
5124    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
5125                basename_in + EAST_VELOCITY_LABEL,
5126                basename_in + NORTH_VELOCITY_LABEL]
5127    quantities = ['HA','UA','VA']
5128
5129    d = ","
5130   
    # Instantiate Urs_points for each of the three mux files.
5132    mux = {}
5133    for quantity, file in map(None, quantities, files_in):
5134        mux[quantity] = Urs_points(file)
5135       
5136    # Could check that the depth is the same. (hashing)
5137
5138    # handle to a mux file to do depth stuff
5139    a_mux = mux[quantities[0]]
5140   
5141    # Convert to utm
5142    latitudes = a_mux.lonlatdep[:,1]
5143    longitudes = a_mux.lonlatdep[:,0]
5144    points_utm, zone = convert_from_latlon_to_utm( \
5145        latitudes=latitudes, longitudes=longitudes)
5146    #print "points_utm", points_utm
5147    #print "zone", zone
    depths = a_mux.lonlatdep[:,2]  # depth in metres, positive down
5149   
5150    fid = open(basename_in + '.txt', 'w')
5151
5152    fid.write("zone: " + str(zone) + "\n")
5153
5154    if location_index is not None:
5155        #Title
5156        li = location_index
5157        fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
5158                  'Northing' + "\n")
5159        fid.write(str(li) +d+ str(latitudes[li])+d+ \
5160              str(longitudes[li]) +d+ str(points_utm[li][0]) +d+ \
              str(points_utm[li][1]) + "\n")
5162
5163    # the non-time dependent stuff
5164    #Title
5165    fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
5166                  'Northing' +d+ 'depth m' + "\n")
5167    i = 0
5168    for depth, point_utm, lat, long in map(None, depths,
5169                                               points_utm, latitudes,
5170                                               longitudes):
5171       
5172        fid.write(str(i) +d+ str(lat)+d+ str(long) +d+ str(point_utm[0]) +d+ \
                  str(point_utm[1]) +d+ str(depth) + "\n")
        i += 1
5175    #Time dependent
5176    if location_index is not None:
5177        time_step = a_mux.time_step
5178        i = 0
5179        #Title
5180        fid.write('time' +d+ 'HA depth m'+d+ \
5181                 'UA momentum East x m/sec' +d+ 'VA momentum North y m/sec' \
5182                      + "\n")
5183        for HA, UA, VA in map(None, mux['HA'], mux['UA'], mux['VA']):
5184            fid.write(str(i*time_step) +d+ str(HA[location_index])+d+ \
5185                      str(UA[location_index]) +d+ str(VA[location_index]) \
5186                      + "\n")
5187           
5188            i +=1
5189   
5190class Urs_points:
5191    """
5192    Read the info in URS mux files.
5193
    For the quantities, here's a correlation between the file names and
    what they mean:
    z-mux is height above sea level, m
    e-mux is velocity in the Eastern direction, m/s
    n-mux is velocity in the Northern direction, m/s
5199    """
5200    def __init__(self,urs_file):
5201        self.iterated = False
5202        columns = 3 # long, lat , depth
5203        mux_file = open(urs_file, 'rb')
5204       
5205        # Number of points/stations
5206        (self.points_num,)= unpack('i',mux_file.read(4))
5207       
5208        # nt, int - Number of time steps
5209        (self.time_step_count,)= unpack('i',mux_file.read(4))
5210        #print "self.time_step_count", self.time_step_count
5211        #dt, float - time step, seconds
5212        (self.time_step,) = unpack('f', mux_file.read(4))
5213        #print "self.time_step", self.time_step
5214        msg = "Bad data in the urs file."
5215        if self.points_num < 0:
5216            mux_file.close()
5217            raise ANUGAError, msg
5218        if self.time_step_count < 0:
5219            mux_file.close()
5220            raise ANUGAError, msg
5221        if self.time_step < 0:
5222            mux_file.close()
5223            raise ANUGAError, msg
5224
        # The depth is in metres and is the distance from the ocean
        # surface to the sea bottom.
5227        lonlatdep = p_array.array('f')
5228        lonlatdep.read(mux_file, columns * self.points_num)
5229        lonlatdep = array(lonlatdep, typecode=Float)   
5230        lonlatdep = reshape(lonlatdep, (self.points_num, columns))
5231        #print 'lonlatdep',lonlatdep
5232        self.lonlatdep = lonlatdep
5233       
5234        self.mux_file = mux_file
5235        # check this array
5236
5237    def __iter__(self):
5238        """
        Iterate over quantity data with respect to time.

        Note: You can only iterate once over an object.

        Returns quantity information for each time slice.
        """
        msg = "You can only iterate once over a urs file."
5246        assert not self.iterated, msg
5247        self.iter_time_step = 0
5248        self.iterated = True
5249        return self
5250   
5251    def next(self):
5252        if self.time_step_count == self.iter_time_step:
5253            self.close()
5254            raise StopIteration
5255        #Read in a time slice  from mux file 
5256        hz_p_array = p_array.array('f')
5257        hz_p_array.read(self.mux_file, self.points_num)
5258        hz_p = array(hz_p_array, typecode=Float)
5259        self.iter_time_step += 1
5260       
5261        return hz_p
5262
5263    def close(self):
5264        self.mux_file.close()
5265       
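    # Sketch of reading a mux file with Urs_points (the file name is
    # hypothetical).  Iterating yields one array of quantity values per
    # time step, and the file is closed when iteration is exhausted:
    #
    #     mux = Urs_points('boundary-z-mux')
    #     print mux.points_num, mux.time_step_count, mux.time_step
    #     for hz in mux:
    #         print min(hz), max(hz)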
5266    #### END URS UNGRIDDED 2 SWW ###
5267
5268       
5269def start_screen_catcher(dir_name=None, myid='', numprocs='', extra_info='',
5270                         verbose=True):
5271    """
5272    Used to store screen output and errors to file, if run on multiple
5273    processes eachprocessor will have its own output and error file.
5274   
5275    extra_info - is used as a string that can identify outputs with another
5276    string eg. '_other'
5277   
5278    FIXME: Would be good if you could suppress all the screen output and
5279    only save it to file... however it seems a bit tricky as this capture
5280    techique response to sys.stdout and by this time it is already printed out.
5281    """
5282   
5283    import sys
5284#    dir_name = dir_name
    if dir_name is None:
5286        dir_name=getcwd()
5287       
5288    if access(dir_name,W_OK) == 0:
5289        if verbose: print 'Making directory %s' %dir_name
5290      #  if verbose: print "myid", myid
5291        mkdir (dir_name,0777)
5292
5293    if myid <>'':
5294        myid = '_'+str(myid)
5295    if numprocs <>'':
5296        numprocs = '_'+str(numprocs)
5297    if extra_info <>'':
5298        extra_info = '_'+str(extra_info)
5299#    print 'hello1'
5300    screen_output_name = join(dir_name, "screen_output%s%s%s.txt" %(myid,
5301                                                                numprocs,
5302                                                                extra_info))
5303    screen_error_name = join(dir_name,  "screen_error%s%s%s.txt" %(myid,
5304                                                              numprocs,
5305                                                              extra_info))
5306
5307    if verbose: print 'Starting ScreenCatcher, all output will be stored in %s' \
5308                                     %(screen_output_name)
5309    #used to catch screen output to file
5310    sys.stdout = Screen_Catcher(screen_output_name)
5311    sys.stderr = Screen_Catcher(screen_error_name)
5312
5313class Screen_Catcher:
5314    """this simply catches the screen output and stores it to file defined by
5315    start_screen_catcher (above)
5316    """
5317   
5318    def __init__(self, filename):
5319        self.filename = filename
5320#        print 'init'
        if exists(self.filename) is True:
            print 'Old existing file "%s" has been deleted' %(self.filename)
5323            remove(self.filename)
5324
5325    def write(self, stuff):
5326        fid = open(self.filename, 'a')
5327        fid.write(stuff)
5328        fid.close()
5329       
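# Minimal usage sketch (the directory and ids are hypothetical): after
# the call below, subsequent prints go to results/screen_output_0_4.txt
# and errors to results/screen_error_0_4.txt instead of the terminal.
#
#     start_screen_catcher('results', myid=0, numprocs=4)
#     print 'this now goes to the output file'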
5330def copy_code_files(dir_name, filename1, filename2=None):
5331    """Copies "filename1" and "filename2" to "dir_name". Very useful for
    information management.
    filename1 and filename2 are both absolute pathnames.
5334    """
5335
5336    if access(dir_name,F_OK) == 0:
5337        print 'Make directory %s' %dir_name
5338        mkdir (dir_name,0777)
5339    shutil.copy(filename1, dir_name + sep + basename(filename1))
    if filename2 is not None:
5341        shutil.copy(filename2, dir_name + sep + basename(filename2))
5342        print 'Files %s and %s copied' %(filename1, filename2)
5343    else:
5344        print 'File %s copied' %(filename1)
5345
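# Usage sketch (the paths are hypothetical):
#
#     copy_code_files('results', '/home/user/run_model.py',
#                     '/home/user/project.py')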
5346def get_data_from_file(filename,separator_value = ','):
5347    """
    Read in data information from a file.

    Returns:
        header_fields, a list of the fields in the first line, split
        by the 'separator_value'

        data, an array (M lines x N data columns) of the values in the file,
        excluding the header

    NOTE: won't deal with columns of different lengths, and there must be
    no blank lines at the end.
5359    """
5360   
5361    fid = open(filename)
5362    lines = fid.readlines()
5363   
5364    fid.close()
5365   
5366    header_line = lines[0]
5367    header_fields = header_line.split(separator_value)
5368
    # Array to store data; typecode Float so values are stored as floats.
    # I'm sure there is a better way!
5371    data=array([],typecode=Float)
5372    data=resize(data,((len(lines)-1),len(header_fields)))
5373#    print 'number of fields',range(len(header_fields))
5374#    print 'number of lines',len(lines), shape(data)
5375#    print'data',data[1,1],header_line
5376
5377    array_number = 0
5378    line_number = 1
5379    while line_number < (len(lines)):
5380        for i in range(len(header_fields)): 
            # line_number starts at 1, skipping the header line;
            # array_number is the corresponding row index into data
5383            fields = lines[line_number].split(separator_value)
5384            #assign to array
5385            data[array_number,i] = float(fields[i])
5386           
5387        line_number = line_number +1
5388        array_number = array_number +1
5389       
5390    return header_fields, data
5391
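# Example sketch: for a hypothetical file 'gauges.csv' containing
#
#     time,stage
#     0.0,1.5
#     0.5,1.8
#
# the call below returns the fields of the first line (the last field
# keeps its trailing newline) and a 2 x 2 Float array of the values:
#
#     header_fields, data = get_data_from_file('gauges.csv')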
5392def store_parameters(verbose=False,**kwargs):
5393    """
    Store "kwargs" in a csv file. If "completed" is in kwargs, the csv file
    is kwargs['file_name']; else it is kwargs['output_dir'] + 'detail_temp.csv'.

    When "completed" is used there must be a 'file_name' keyword arg; this
    is the file that is written to. There might be a better way to do this
    using the csv module's Writer.

    That is, the file goes to "output_dir" unless "completed" is in kwargs,
    in which case it goes to the "file_name" kwarg.
5402
5403    """
5404    import types
5405#    import os
5406   
5407    # Check that kwargs is a dictionary
5408    if type(kwargs) != types.DictType:
5409        raise TypeError
5410   
    # is 'completed' in kwargs?
5412    try:
5413        kwargs['completed']
5414        completed=True
5415    except:
5416        completed=False
5417 
    # get the file name and assert that a file_name exists
5419    if completed:
5420        try:
5421            file = str(kwargs['file_name'])
5422        except:
            raise Exception, 'kwargs must have file_name'
5424    else:
5425        #write temp file in output directory
5426        try:
5427            file = str(kwargs['output_dir'])+'detail_temp.csv'
5428        except:
            raise Exception, 'kwargs must have output_dir'
5430       
5431    #extracts the header info and the new line info
5432    line=''
5433    header=''
5434    count=0
5435    keys = kwargs.keys()
5436    keys.sort()
5437   
5438    #used the sorted keys to create the header and line data
5439    for k in keys:
5440#        print "%s = %s" %(k, kwargs[k])
5441        header = header+str(k)
5442        line = line+str(kwargs[k])
5443        count+=1
5444        if count <len(kwargs):
5445            header = header+','
5446            line = line+','
5447    header+='\n'
5448    line+='\n'
5449
5450    # checks the header info, if the same, then write, if not create a new file
5451    #try to open!
5452    try:
5453        fid = open(file,"r")
5454        file_header=fid.readline()
5455        fid.close()
5456        if verbose: print 'read file header %s' %file_header
5457       
5458    except:
        msg = 'Try to create new file %s' %file
5460        if verbose: print msg
5461        #tries to open file, maybe directory is bad
5462        try:
5463            fid = open(file,"w")
5464            fid.write(header)
5465            fid.close()
5466            file_header=header
5467        except:
            msg = 'Cannot create new file %s' %file
            raise Exception, msg
5470           
5471    #if header is same or this is a new file
5472    if file_header==str(header):
5473        fid=open(file,"a")
5474        #write new line
5475        fid.write(line)
5476        fid.close()
5477    else:
5478        #backup plan,
5479        # if header is different and has completed will append info to
5480        #end of details_temp.cvs file in output directory
5481        file = str(kwargs['output_dir'])+'detail_temp.csv'
5482        fid=open(file,"a")
5483        fid.write(header)
5484        fid.write(line)
5485        fid.close()
5486        if verbose: print 'file',file_header.strip('\n')
5487        if verbose: print 'head',header.strip('\n')
5488        if file_header.strip('\n')==str(header): print 'they equal'
        msg = 'WARNING: File header does not match input info; the input '\
              'variables have changed. Suggest changing the file name.'
5490        print msg
5491
5492
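# Usage sketch (the keyword names are hypothetical; note that output_dir
# must include a trailing separator, since the file name is appended
# directly).  The call below appends one csv row to
# results/detail_temp.csv, with a header built from the sorted keyword
# names:
#
#     store_parameters(output_dir='results/', run_time=42.0, max_stage=1.8)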
5493
5494
5495
5496def get_maximum_inundation_elevation(filename,
5497                                     polygon=None,
5498                                     time_interval=None,
5499                                     verbose=False):
5500   
5501    """Return highest elevation where depth > 0
5502   
5503    Usage:
5504    max_runup = get_maximum_inundation_elevation(filename,
5505                                                 polygon=None,
5506                                                 time_interval=None,
5507                                                 verbose=False)
5508
5509    filename is a NetCDF sww file containing ANUGA model output.   
    Optional arguments polygon and time_interval restrict the maximum
    runup calculation
    to points that lie within the specified polygon and time interval.
5513
5514    If no inundation is found within polygon and time_interval the return value
5515    is None signifying "No Runup" or "Everything is dry".
5516
5517    See general function get_maximum_inundation_data for details.
5518   
5519    """
5520   
5521    runup, _ = get_maximum_inundation_data(filename,
5522                                           polygon=polygon,
5523                                           time_interval=time_interval,
5524                                           verbose=verbose)
5525    return runup
5526
5527
5528
5529
5530def get_maximum_inundation_location(filename,
5531                                    polygon=None,
5532                                    time_interval=None,
5533                                    verbose=False):
5534    """Return location of highest elevation where h > 0
5535   
5536   
5537    Usage:
5538    max_runup_location = get_maximum_inundation_location(filename,
5539                                                         polygon=None,
5540                                                         time_interval=None,
5541                                                         verbose=False)
5542
5543    filename is a NetCDF sww file containing ANUGA model output.
    Optional arguments polygon and time_interval restrict the maximum
    runup calculation
    to points that lie within the specified polygon and time interval.
5547
5548    If no inundation is found within polygon and time_interval the return value
5549    is None signifying "No Runup" or "Everything is dry".
5550
5551    See general function get_maximum_inundation_data for details.
5552    """
5553   
5554    _, max_loc = get_maximum_inundation_data(filename,
5555                                             polygon=polygon,
5556                                             time_interval=time_interval,
5557                                             verbose=verbose)
5558    return max_loc
5559   
5560
5561
5562def get_maximum_inundation_data(filename, polygon=None, time_interval=None,
5563                                use_centroid_values=False,
5564                                verbose=False):
5565    """Compute maximum run up height from sww file.
5566
5567
5568    Usage:
5569    runup, location = get_maximum_inundation_data(filename,
5570                                                  polygon=None,
5571                                                  time_interval=None,
5572                                                  verbose=False)
5573   
5574
5575    Algorithm is as in get_maximum_inundation_elevation from
5576    shallow_water_domain
5577    except that this function works with the sww file and computes the maximal
5578    runup height over multiple timesteps.
5579   
    Optional arguments polygon and time_interval restrict the
    maximum runup calculation
    to points that lie within the specified polygon and time interval.
5583    Polygon is
5584    assumed to be in (absolute) UTM coordinates in the same zone as domain.
5585
5586    If no inundation is found within polygon and time_interval the return value
5587    is None signifying "No Runup" or "Everything is dry".
5588    """
5589
5590    # We are using nodal values here as that is what is stored in sww files.
5591
5592    # Water depth below which it is considered to be 0 in the model
5593    # FIXME (Ole): Allow this to be specified as a keyword argument as well
5594
5595    from anuga.utilities.polygon import inside_polygon   
5596    from anuga.config import minimum_allowed_height
5597    from Scientific.IO.NetCDF import NetCDFFile
5598
5599    dir, base = os.path.split(filename)
5600           
5601    iterate_over = get_all_swwfiles(dir,base)
5602   
5603    # Read sww file
5604    if verbose: 
5605        print 'Reading from %s' %filename
5606        # FIXME: Use general swwstats (when done)
5607   
5608    maximal_runup = None
5609    maximal_runup_location = None
5610   
    for i, swwfile in enumerate(iterate_over):
5612       
5613        # Read sww file
5614        filename = join(dir,swwfile+'.sww')
5615       
5616        if verbose: 
5617            print 'Reading from %s' %filename
5618            # FIXME: Use general swwstats (when done)
5619               
5620        fid = NetCDFFile(filename)
5621   
5622        # Get geo_reference
5623        # sww files don't have to have a geo_ref
5624        try:
5625            geo_reference = Geo_reference(NetCDFObject=fid)
5626        except AttributeError, e:
5627            geo_reference = Geo_reference() # Default georef object
5628           
5629        xllcorner = geo_reference.get_xllcorner()
5630        yllcorner = geo_reference.get_yllcorner()
5631        zone = geo_reference.get_zone()
5632       
5633        # Get extent
5634        volumes = fid.variables['volumes'][:]   
5635        x = fid.variables['x'][:] + xllcorner
5636        y = fid.variables['y'][:] + yllcorner
5637   
5638   
        # Get the relevant quantities (Convert from single precision)
5640        elevation = array(fid.variables['elevation'][:], Float) 
5641        stage = array(fid.variables['stage'][:], Float)
5642   
5643   
5644        # Here's where one could convert nodal information to centroid
5645        # information
5646        # but is probably something we need to write in C.
5647        # Here's a Python thought which is NOT finished!!!
5648        if use_centroid_values is True:
5649            x = get_centroid_values(x, volumes)
5650            y = get_centroid_values(y, volumes)   
5651            elevation = get_centroid_values(elevation, volumes)   
5652   
5653   
5654        # Spatial restriction
5655        if polygon is not None:
5656            msg = 'polygon must be a sequence of points.'
5657            assert len(polygon[0]) == 2, msg
5658            # FIXME (Ole): Make a generic polygon input check in polygon.py
5659            # and call it here
5660           
5661            points = concatenate((x[:,NewAxis], y[:,NewAxis]), axis=1)
5662   
5663            point_indices = inside_polygon(points, polygon)
5664   
5665            # Restrict quantities to polygon
5666            elevation = take(elevation, point_indices)
5667            stage = take(stage, point_indices, axis=1)
5668   
5669            # Get info for location of maximal runup
5670            points_in_polygon = take(points, point_indices)
5671            x = points_in_polygon[:,0]
5672            y = points_in_polygon[:,1]       
5673        else:
5674            # Take all points
5675            point_indices = arange(len(x))
5676           
5677   
5678        # Temporal restriction
5679        time = fid.variables['time'][:]
5680        all_timeindices = arange(len(time))       
5681        if time_interval is not None:
5682           
5683            msg = 'time_interval must be a sequence of length 2.'
5684            assert len(time_interval) == 2, msg
5685            msg = 'time_interval %s must not be decreasing.' %(time_interval)
5686            assert time_interval[1] >= time_interval[0], msg
5687           
5688            msg = 'Specified time interval [%.8f:%.8f]' %tuple(time_interval)
            msg += ' does not match model time interval: [%.8f, %.8f]\n'\
5690                   %(time[0], time[-1])
5691            if time_interval[1] < time[0]: raise ValueError(msg)
5692            if time_interval[0] > time[-1]: raise ValueError(msg)
5693   
5694            # Take time indices corresponding to interval (& is bitwise AND)
5695            timesteps = compress((time_interval[0] <= time) & (time <= time_interval[1]),
5696                                 all_timeindices)
5697   
5698   
5699            msg = 'time_interval %s did not include any model timesteps.' %(time_interval)       
5700            assert not alltrue(timesteps == 0), msg
5701   
5702   
5703        else:
5704            # Take them all
5705            timesteps = all_timeindices
5706       
5707   
5708        fid.close()
5709   
5710        # Compute maximal runup for each timestep
5711        #maximal_runup = None
5712        #maximal_runup_location = None
5713        #maximal_runups = [None]
5714        #maximal_runup_locations = [None]
5715       
5716        for i in timesteps:
5717            if use_centroid_values is True:
5718                stage_i = get_centroid_values(stage[i,:], volumes)   
5719            else:
5720                stage_i = stage[i,:]
5721               
5722            depth = stage_i  - elevation
5723       
5724            # Get wet nodes i.e. nodes with depth>0 within given region and timesteps
5725            wet_nodes = compress(depth > minimum_allowed_height, arange(len(depth)))
5726   
5727            if alltrue(wet_nodes == 0):
5728                runup = None
5729            else:   
5730                # Find maximum elevation among wet nodes
5731                wet_elevation = take(elevation, wet_nodes)
5732   
5733                runup_index = argmax(wet_elevation)
5734                runup = max(wet_elevation)
5735                assert wet_elevation[runup_index] == runup # Must be True
5736            #print "runup", runup
5737            #print "maximal_runup", maximal_runup
5738           
5739            if runup > maximal_runup:
                maximal_runup = runup      # This works even if maximal_runup is None
5741                #print "NEW RUNUP",runup
5742   
5743                # Record location
5744                wet_x = take(x, wet_nodes)
5745                wet_y = take(y, wet_nodes)           
5746                maximal_runup_location = [wet_x[runup_index], wet_y[runup_index]]
5747   
5748    #print 'maximal_runup, maximal_runup_location',maximal_runup, maximal_runup_location
5749    return maximal_runup, maximal_runup_location
5750
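# Example sketch (the file name, polygon and interval are hypothetical):
#
#     runup, location = get_maximum_inundation_data(
#                           'model_output.sww',
#                           polygon=[[0,0], [100,0], [100,100], [0,100]],
#                           time_interval=[0.0, 3600.0])
#     if runup is None:
#         print 'Everything within the polygon stayed dry'
#     else:
#         print 'Maximal runup %f at %s' %(runup, location)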
5751def get_all_swwfiles(look_in_dir='',base_name='',verbose=False):
5752    '''
5753    Finds all the sww files in a "look_in_dir" which contains a "base_name".
5754    will accept base_name with or without the extension ".sww"
5755   
5756    Returns: a list of strings
5757       
5758    Usage:     iterate_over = get_all_swwfiles(dir, name)
5759    then
5760               for swwfile in iterate_over:
5761                   do stuff
5762                   
5763    Check "export_grids" and "get_maximum_inundation_data" for examples
5764    '''
5765   
5766    #plus tests the extension
5767    name, extension = os.path.splitext(base_name)
5768
    if extension <> '' and extension <> '.sww':
        msg = 'file %s %s must be a NetCDF sww file!' %(base_name, extension)
5771        raise IOError, msg
5772
5773    if look_in_dir == "":
5774        look_in_dir = "." # Unix compatibility
5775   
5776    dir_ls = os.listdir(look_in_dir)
5777    #print 'dir_ls',dir_ls, base
5778    iterate_over = [x[:-4] for x in dir_ls if name in x and x[-4:] == '.sww']
5779    if len(iterate_over) == 0:
5780        msg = 'No files of the base name %s'\
5781              %(name)
5782        raise IOError, msg
5783    if verbose: print 'iterate over %s' %(iterate_over)
5784
5785    return iterate_over
5786
5787def get_all_files_with_extension(look_in_dir='',base_name='',extension='.sww',verbose=False):
5788    '''
    Finds all the files in "look_in_dir" which contain "base_name" and
    end with "extension".
5790   
5791   
5792    Returns: a list of strings
5793       
    Usage:     iterate_over = get_all_files_with_extension(dir, name, ext)
5795    then
5796               for swwfile in iterate_over:
5797                   do stuff
5798                   
5799    Check "export_grids" and "get_maximum_inundation_data" for examples
5800    '''
5801   
5802    #plus tests the extension
5803    name, ext = os.path.splitext(base_name)
5804#    print 'look_in_dir',look_in_dir
5805
    if ext <> '' and ext <> extension:
        msg = 'base_name %s must be a file with %s extension!' %(base_name, extension)
5808        raise IOError, msg
5809
5810    if look_in_dir == "":
5811        look_in_dir = "." # Unix compatibility
5812#    print 'look_in_dir',look_in_dir, getcwd()
5813    dir_ls = os.listdir(look_in_dir)
5814    #print 'dir_ls',dir_ls, base_name
    # Don't assume a 4-character extension; use len(extension)
    iterate_over = [x[:-len(extension)] for x in dir_ls
                    if name in x and x[-len(extension):] == extension]
5816    if len(iterate_over) == 0:
5817        msg = 'No files of the base name %s in %s'\
5818              %(name, look_in_dir)
5819        raise IOError, msg
5820    if verbose: print 'iterate over %s' %(iterate_over)
5821
5822    return iterate_over
5823
5824def get_all_directories_with_name(look_in_dir='',base_name='',verbose=False):
5825    '''
    Finds all the directories in "look_in_dir" whose names contain "base_name".
5827   
5828   
5829    Returns: a list of strings
5830       
    Usage:     iterate_over = get_all_directories_with_name(dir, name)
    then
               for directory in iterate_over:
5834                   do stuff
5835                   
5836    Check "export_grids" and "get_maximum_inundation_data" for examples
5837    '''
5838   
5839    #plus tests the extension
5840
5841    if look_in_dir == "":
5842        look_in_dir = "." # Unix compatibility
5843#    print 'look_in_dir',look_in_dir
5844    dir_ls = os.listdir(look_in_dir)
5845#    print 'dir_ls',dir_ls
5846    iterate_over = [x for x in dir_ls if base_name in x]
5847    if len(iterate_over) == 0:
        msg = 'No directories of the base name %s'\
              %(base_name)
5850        raise IOError, msg
5851    if verbose: print 'iterate over %s' %(iterate_over)
5852
5853    return iterate_over
5854
5855def points2polygon(points_file,
5856                    minimum_triangle_angle=3.0):
5857    """
5858    WARNING: This function is not fully working. 
5859   
    Function to return a polygon derived from the alpha shape of a points file.

    WARNING: Alpha shape can return multiple polygons, but this function
    only returns one polygon.
5863   
5864    """
5865    from anuga.pmesh.mesh import Mesh, importMeshFromFile
5866    from anuga.shallow_water import Domain   
5867    from anuga.pmesh.mesh_interface import create_mesh_from_regions
5868   
5869    mesh = importMeshFromFile(points_file)
5870    mesh.auto_segment()
5871    mesh.exportASCIIsegmentoutlinefile("outline.tsh")
5872    mesh2 = importMeshFromFile("outline.tsh")
    mesh2.generate_mesh(maximum_triangle_area=1000000000,
                        minimum_triangle_angle=minimum_triangle_angle,
                        verbose=False)
5874    mesh2.export_mesh_file('outline_meshed.tsh')
5875    domain = Domain("outline_meshed.tsh", use_cache = False)
5876    polygon =  domain.get_boundary_polygon()
5877    return polygon
5878
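# Usage sketch (the file name is hypothetical); note the warnings in the
# docstring above:
#
#     polygon = points2polygon('field_points.csv')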
5879#-------------------------------------------------------------
5880if __name__ == "__main__":
    # Set umask from config to force the same permissions for all files and
    # directories created. (It was noticed that "mpirun" doesn't honour the
    # umask set in your .bashrc etc file.)
5884    from config import umask
5885    import os 
5886    os.umask(umask)