source: anuga_core/source/anuga/shallow_water/data_manager.py @ 4612

Last change on this file since 4612 was 4612, checked in by nick, 17 years ago

moved def _load_exposure_csv(self, file_name, title_check_list=None)
to
csv2dict(file_name, title_check_list=None)
This allows csv2dict to be used easily by other parts of ANUGA

1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using abstract_2d_finite_volumes
48
49"""

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass
class NewQuantity(exceptions.Exception): pass



import csv
import os, sys
import shutil
from struct import unpack
import array as p_array
from os import sep, path, remove, mkdir, access, F_OK, W_OK, getcwd


from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around, reshape, transpose, sort, \
     NewAxis, ArrayType, compress, take, arange, argmax, alltrue, shape

import string

from Scientific.IO.NetCDF import NetCDFFile
from os.path import exists, basename, join


from anuga.coordinate_transforms.redfearn import redfearn, \
     convert_from_latlon_to_utm
from anuga.coordinate_transforms.geo_reference import Geo_reference, \
     write_NetCDF_georeference, ensure_geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data, \
     ensure_absolute
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.utilities.numerical_tools import ensure_numeric, mean
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError
from anuga.shallow_water import Domain
from anuga.abstract_2d_finite_volumes.pmesh2domain import \
     pmesh_to_domain_instance
from anuga.abstract_2d_finite_volumes.util import get_revision_number, \
     remove_lone_verts, sww2timeseries, get_centroid_values
from anuga.load_mesh.loadASCII import export_mesh_file

# Formula mappings for derived quantities
quantity_formula = {'momentum':'(xmomentum**2 + ymomentum**2)**0.5',
                    'depth':'stage-elevation',
                    'speed': \
 '(xmomentum**2 + ymomentum**2)**0.5/(stage-elevation+1.e-6/(stage-elevation))'}

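# Example of deriving a quantity from the formula mapping above.
# A sketch only, with plain Numeric arrays standing in for sww data:
#
#     from Numeric import array
#     stage = array([1.0, 2.0, 3.0])
#     elevation = array([0.5, 0.5, 2.5])
#     depth = eval(quantity_formula['depth'])    # stage - elevation
#
# Note that the 'speed' formula divides momentum by a regularised depth,
# h + 1.e-6/h, to avoid division by zero in nearly dry cells.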


def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s


def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK|os.W_OK) or path == ''):
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return path



def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)


# ANOTHER OPTION, IF NEEDED IN THE FUTURE, Nick B 7/2007
def rmgeneric(path, __func__, verbose=False):
    ERROR_STR = """Error removing %(path)s, %(error)s """

    try:
        __func__(path)
        if verbose: print 'Removed ', path
    except OSError, (errno, strerror):
        print ERROR_STR % {'path' : path, 'error': strerror}

def removeall(path, verbose=False):

    if not os.path.isdir(path):
        return

    files = os.listdir(path)

    for x in files:
        fullpath = os.path.join(path, x)
        if os.path.isfile(fullpath):
            f = os.remove
            rmgeneric(fullpath, f)
        elif os.path.isdir(fullpath):
            removeall(fullpath, verbose)
            f = os.rmdir
            rmgeneric(fullpath, f, verbose)



def create_filename(datadir, filename, format, size=None, time=None):

    import os

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN


def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os

    dir = check_dir(datadir)  # Includes trailing separator

    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)

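# Example of the naming convention implemented by create_filename
# (a sketch only; the directory 'swwfiles' is hypothetical and will be
# created by check_dir if missing):
#
#     FN = create_filename('swwfiles', 'runup', 'sww', size=4, time=1.25)
#     # FN == 'swwfiles/runup_size4_time1.25.sww' (on unix)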

#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], \
               "Mode %s must be either 'r' (read), 'w' (write) or 'a' (append)" %mode

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.timestep = 0
        self.domain = domain



        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)




        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #File size limit (2Gig by default)
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'
            self.writer = Write_sww()
            self.writer.header(fid, domain.starttime,
                               self.number_of_volumes,
                               self.domain.number_of_full_nodes,
                               description=description,
                               smoothing=domain.smooth,
                               order=domain.default_order)
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            # Older inline NetCDF dimension/variable definitions were
            # superseded by Write_sww (header/triangulation/quantities).

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)

        points = concatenate( (X[:,NewAxis],Y[:,NewAxis]), axis=1 )
        self.writer.triangulation(fid, points,
                                  V.astype(volumes.typecode()),
                                  Z,
                                  points_georeference= \
                                  domain.geo_reference)

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #change domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s' %self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename + filename_ext)


            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure = \
                Data_format_sww(self.domain, mode=self.mode,
                                max_size=self.max_size,
                                recursion=self.recursion+1)
            if not self.recursion:
                print '    file_size = %s' %file_size
                print '    saving file to %s' %next_data_structure.filename

            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)
            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            if 'stage' in names and 'xmomentum' in names and \
                   'ymomentum' in names:

                # Get stage
                Q = domain.quantities['stage']
                A, _ = Q.get_vertex_values(xy=False,
                                           precision=self.precision)
                z = fid.variables['elevation']
                # Clip stage to bed level where depth is below the
                # minimum storable height, so such vertices appear dry
                stage = choose(A-z[:] >= self.minimum_storable_height,
                               (z[:], A))

                # Get xmomentum
                Q = domain.quantities['xmomentum']
                xmomentum, _ = Q.get_vertex_values(xy=False,
                                                   precision=self.precision)

                # Get ymomentum
                Q = domain.quantities['ymomentum']
                ymomentum, _ = Q.get_vertex_values(xy=False,
                                                   precision=self.precision)
                self.writer.quantities(fid,
                                       time=self.domain.time,
                                       precision=self.precision,
                                       stage=stage,
                                       xmomentum=xmomentum,
                                       ymomentum=ymomentum)
            else:
                # This is producing an sww that is not standard.
                #Store time
                time[i] = self.domain.time

                for name in names:
                    # Get quantity
                    Q = domain.quantities[name]
                    A, V = Q.get_vertex_values(xy=False,
                                               precision=self.precision)

                    #FIXME: Make this general (see below)
                    if name == 'stage':
                        z = fid.variables['elevation']
                        A = choose(A-z[:] >= self.minimum_storable_height,
                                   (z[:], A))
                        stage[i,:] = A.astype(self.precision)
                    elif name == 'xmomentum':
                        xmomentum[i,:] = A.astype(self.precision)
                    elif name == 'ymomentum':
                        ymomentum[i,:] = A.astype(self.precision)

                    #As in....
                    #eval( name + '[i,:] = A.astype(self.precision)' )
                    #FIXME: But we need a UNIT test for that before
                    #refactoring



            #Flush and close
            fid.sync()
            fid.close()
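# Example of the dry-cell clipping applied in store_timestep above.
# A sketch only, using plain Numeric arrays:
#
#     from Numeric import array, choose
#     z = array([0.0, 1.0, 2.0])        # bed elevation at vertices
#     A = array([0.5, 1.0005, 1.9])     # stage at vertices
#     h_min = 0.01                      # minimum storable height
#     stage = choose(A - z >= h_min, (z, A))
#     # stage == [0.5, 1.0, 2.0]: vertices with depth below h_min are
#     # stored at bed level, so viewers render them as dry.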



#Class for handling checkpoint data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt) for checkpointing
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity of the triangles
        constituting the mesh.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)

        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        # Note: the checkpoint file defines no elevation variable,
        # so Z is not stored here.

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        var = fid.variables[name]  #The NetCDF variable for this quantity
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A, V = Q.get_vertex_values(xy=False,
                                   precision=self.precision)

        var[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()


#### NED is national exposure database (name changed to NEXIS)

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self, file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None, title_check_list=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.

        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad --
        the info to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.
        """
        self._file_name = file_name
        self._geospatial = None

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data.

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
            csv2dict(self._file_name, title_check_list=title_check_list)
        try:
            #Have code here that handles caps or lower
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if is_x_y_locations != False:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes=lats,
                                               longitudes=longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #FIXME throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #Check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result != 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result != 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic,
                                         other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result != 0:
                    return result
            return 0
        else:
            return 1


    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
            time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' %column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column, using the known_values,
        to return a value of the value_column_name.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right-most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.

        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) != value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' %column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name, 'wb')
        writer = csv.writer(fd)

        #Write the title row to the csv file
        line = [None]*len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]] = title
        writer.writerow(line)

        # Write the values to the csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]*len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]] = \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

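# Example use of Exposure_csv (a sketch only; 'exposure.csv' is a
# hypothetical file with LATITUDE, LONGITUDE and WALLS columns):
#
#     exposure = Exposure_csv('exposure.csv')
#     walls = exposure.get_column('WALLS')    # list of strings
#     exposure.set_column('WALLS_COPY', walls, overwrite=False)
#     exposure.save('exposure_out.csv')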
def csv2dict(file_name, title_check_list=None):
    """
    Load the csv file into two dictionaries: one mapping each title to
    its column of values, and one mapping each title to its column index
    (to keep track of the column order).

    WARNING: Values are returned as strings.
    Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]
    """

    attribute_dic = {}
    title_index_dic = {}
    titles_stripped = [] # list of titles
    reader = csv.reader(file(file_name))

    # Read in and manipulate the title info
    titles = reader.next()
    for i, title in enumerate(titles):
        titles_stripped.append(title.strip())
        title_index_dic[title.strip()] = i
    title_count = len(titles_stripped)
    if title_check_list is not None:
        for title_check in title_check_list:
            if not title_index_dic.has_key(title_check):
                msg = "Reading error. This title is not present: %s" \
                      %title_check
                raise IOError, msg



    #Create a dictionary of column values, indexed by column title
    for line in reader:
        if len(line) != title_count:
            raise IOError #FIXME make this nicer
        for i, value in enumerate(line):
            attribute_dic.setdefault(titles_stripped[i], []).append(value)

    return attribute_dic, title_index_dic
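# Example: given a (hypothetical) file 'gauges.csv' containing
#
#     easting,northing,name
#     500000.0,8000000.0,gauge1
#     500100.0,8000100.0,gauge2
#
# csv2dict('gauges.csv') returns
#
#     attribute_dic == {'easting': ['500000.0', '500100.0'],
#                       'northing': ['8000000.0', '8000100.0'],
#                       'name': ['gauge1', 'gauge2']}
#     title_index_dic == {'easting': 0, 'northing': 1, 'name': 2}
#
# Values are strings, e.g. convert with
#     eastings = [float(x) for x in attribute_dic['easting']]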


#Auxiliary
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format).
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]
    """

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N == 3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))

    outfile.close()
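# Example use of write_obj: two disconnected triangles, one row per
# element (a sketch only; writes 'two_triangles.obj'):
#
#     from Numeric import array
#     x = array([[0.0, 1.0, 0.0], [1.0, 1.0, 0.0]])
#     y = array([[0.0, 0.0, 1.0], [0.0, 1.0, 1.0]])
#     z = array([[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]])
#     write_obj('two_triangles', x, y, z)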


#########################################################
#Conversion routines
#########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN, xx, yy, zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN, xx, yy, zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float, tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry', x, y, z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(filename, 'r').readlines()  #Glob returns full paths
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Could not parse time from filename %s' %filename



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float, tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t, x, y, z)


def filter_netcdf(filename1, filename2, first=0, last=None, step=1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
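# Example: keep every second timestep of a (hypothetical) run.sww in a
# smaller file:
#
#     filter_netcdf('run.sww', 'run_coarse.sww', first=0, step=2)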


#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of the data format class named by domain.format
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)
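# The eval-based dispatch above requires domain.format to name an
# existing Data_format_* class in this module.  An equivalent,
# table-driven sketch (not the mechanism actually used here):
#
#     _data_formats = {'sww': Data_format_sww, 'cpt': Data_format_cpt}
#     def get_dataobject_safe(domain, mode='w'):
#         return _data_formats[domain.format](domain, mode)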



def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result
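# Example use of dem2pts (a sketch only; 'topo.dem' is hypothetical):
#
#     dem2pts('topo', basename_out='topo_clipped',
#             easting_min=722000, easting_max=725000,
#             northing_min=5893000, northing_max=5895000,
#             use_cache=True, verbose=True)
#
# With use_cache=True the conversion is only re-run when topo.dem changes.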


def _dem2pts(basename_in, basename_out=None, verbose=False,
             easting_min=None, easting_max=None,
             northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                          'of spatial point data'
    #Assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #Compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # Calculate number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1, j1_0:thisj+1]

    if verbose:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1

    #Store data
    global_index = 0
    for i in range(i1_0, thisi+1, 1):
        if verbose and i%((nrows+10)/10) == 0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i, index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(j1_0, index2+1, 1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min, y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()
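# Worked example of the grid indexing used above, with the header values
# from the dem2pts docstring (ncols=3121, nrows=1800, xllcorner=722000,
# yllcorner=5893000, cellsize=25):
#
#     x = j*cellsize + xllcorner             # j=0    -> easting  722000
#     y = (nrows-i-1)*cellsize + yllcorner   # i=0    -> northing 5937975
#                                            # i=1799 -> northing 5893000
#
# i.e. rows are stored from north to south, which is why row 0 maps to
# the maximum northing.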



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])

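# Example of the generator above on a minimal block (a sketch only):
#
#     lines = ['  SURFACE LINE:',
#              '   405548.67,   6438142.76,   35.37',
#              '   405552.24,   6438146.28,   35.41',
#              '  END CROSS-SECTION']
#     for section in _read_hecras_cross_sections(lines):
#         print section
#     # [[405548.67, 6438142.76, 35.37], [405552.24, 6438146.28, 35.41]]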
1504
1505
1506
1507def hecras_cross_sections2pts(basename_in,
1508                              basename_out=None,
1509                              verbose=False):
1510    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1511
1512    Example:
1513
1514
1515# RAS export file created on Mon 15Aug2005 11:42
1516# by HEC-RAS Version 3.1.1
1517
1518BEGIN HEADER:
1519  UNITS: METRIC
1520  DTM TYPE: TIN
1521  DTM: v:\1\cit\perth_topo\river_tin
1522  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1523  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1524  MAP PROJECTION: UTM
1525  PROJECTION ZONE: 50
1526  DATUM: AGD66
1527  VERTICAL DATUM:
1528  NUMBER OF REACHES:  19
1529  NUMBER OF CROSS-SECTIONS:  14206
1530END HEADER:
1531
1532
1533Only the SURFACE LINE data of the following form will be utilised
1534
1535  CROSS-SECTION:
1536    STREAM ID:Southern-Wungong
1537    REACH ID:Southern-Wungong
1538    STATION:19040.*
1539    CUT LINE:
1540      405548.671603161 , 6438142.7594925
1541      405734.536092045 , 6438326.10404912
1542      405745.130459356 , 6438331.48627354
1543      405813.89633823 , 6438368.6272789
1544    SURFACE LINE:
1545     405548.67,   6438142.76,   35.37
1546     405552.24,   6438146.28,   35.41
1547     405554.78,   6438148.78,   35.44
1548     405555.80,   6438149.79,   35.44
1549     405559.37,   6438153.31,   35.45
1550     405560.88,   6438154.81,   35.44
1551     405562.93,   6438156.83,   35.42
1552     405566.50,   6438160.35,   35.38
1553     405566.99,   6438160.83,   35.37
1554     ...
1555   END CROSS-SECTION
1556
1557    Convert to NetCDF pts format which is
1558
1559    points:  (Nx2) Float array
1560    elevation: N Float array
1561    """
1562
1563    import os
1564    from Scientific.IO.NetCDF import NetCDFFile
1565    from Numeric import Float, zeros, reshape
1566
1567    root = basename_in
1568
1569    #Get ASCII file
1570    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1571
1572    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1573
1574    lines = infile.readlines()
1575    infile.close()
1576
1577    if verbose: print 'Converting to pts format'
1578
1579    i = 0
1580    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1581        i += 1
1582
1583    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1584    i += 1
1585
1586    assert lines[i].strip().upper().startswith('UNITS:')
1587    units = lines[i].strip().split()[1]
1588    i += 1
1589
1590    assert lines[i].strip().upper().startswith('DTM TYPE:')
1591    i += 1
1592
1593    assert lines[i].strip().upper().startswith('DTM:')
1594    i += 1
1595
1596    assert lines[i].strip().upper().startswith('STREAM')
1597    i += 1
1598
1599    assert lines[i].strip().upper().startswith('CROSS')
1600    i += 1
1601
1602    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1603    projection = lines[i].strip().split(':')[1]
1604    i += 1
1605
1606    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1607    zone = int(lines[i].strip().split(':')[1])
1608    i += 1
1609
1610    assert lines[i].strip().upper().startswith('DATUM:')
1611    datum = lines[i].strip().split(':')[1]
1612    i += 1
1613
1614    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1615    i += 1
1616
1617    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1618    i += 1
1619
1620    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1621    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1622    i += 1
1623
1624
1625    #Now read all points
1626    points = []
1627    elevation = []
1628    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1629        for k, entry in enumerate(entries):
1630            points.append(entry[:2])
1631            elevation.append(entry[2])
1632
1633
1634    msg = 'Actual number of cross sections read (%d) does not match '\
1635          'the reported number (%d)' %(j+1, number_of_cross_sections)
1636    assert j+1 == number_of_cross_sections, msg
1637
1638    #Get output file
1639    if basename_out is None:
1640        ptsname = root + '.pts'
1641    else:
1642        ptsname = basename_out + '.pts'
1643
1644    geo_ref = Geo_reference(zone, 0, 0, datum, projection, units)
1645    geo = Geospatial_data(points, {"elevation":elevation},
1646                          verbose=verbose, geo_reference=geo_ref)
1647    geo.export_points_file(ptsname)
1648
1649def export_grid(basename_in, extra_name_out = None,
1650                quantities = None, # defaults to elevation
1651                timestep = None,
1652                reduction = None,
1653                cellsize = 10,
1654                NODATA_value = -9999,
1655                easting_min = None,
1656                easting_max = None,
1657                northing_min = None,
1658                northing_max = None,
1659                verbose = False,
1660                origin = None,
1661                datum = 'WGS84',
1662                format = 'ers'):
1663    """
1664   
1665    Wrapper for sww2dem. - see sww2dem to find out what most of the
1666    parameters do.
1667
1668    Quantities is a list of quantities.  Each quantity will be
1669    calculated for each sww file.
1670
1671    This returns the basenames of the files returned, which is made up
1672    of the dir and all of the file name, except the extension.
1673
1674    This function returns the names of the files produced.
1675
1676    It will also produce as many output files as there are input sww files.
1677    """
1678   
1679    if quantities is None:
1680        quantities = ['elevation']
1681       
1682    if type(quantities) is str:
1683        quantities = [quantities]
1684
1685    # How many sww files are there?
1686    dir, base = os.path.split(basename_in)
1689
1690    iterate_over = get_all_swwfiles(dir,base,verbose)
1691   
1692    if dir == "":
1693        dir = "." # Unix compatibility
1694   
1695    files_out = []
1697    for sww_file in iterate_over:
1698        for quantity in quantities:
1699            if extra_name_out is None:
1700                basename_out = sww_file + '_' + quantity
1701            else:
1702                basename_out = sww_file + '_' + quantity + '_' \
1703                               + extra_name_out
1705       
1706            file_out = sww2dem(dir+sep+sww_file, dir+sep+basename_out,
1707                               quantity, 
1708                               timestep,
1709                               reduction,
1710                               cellsize,
1711                               NODATA_value,
1712                               easting_min,
1713                               easting_max,
1714                               northing_min,
1715                               northing_max,
1716                               verbose,
1717                               origin,
1718                               datum,
1719                               format)
1720            files_out.append(file_out)
1721    #print "basenames_out after",basenames_out
1722    return files_out
1723
1724
1725def get_timeseries(production_dirs, output_dir, scenario_name, gauges_dir_name,
1726                   plot_quantity, generate_fig = False,
1727                   reportname = None, surface = False, time_min = None,
1728                   time_max = None, title_on = False, verbose = True,
1729                   nodes=None):
1730    """
1731    nodes - number of processes used.
1732
1733    warning - this function has no tests
1734    """
1735    if reportname is None:
1736        report = False
1737    else:
1738        report = True
1739       
1740    if nodes is None:
1741        is_parallel = False
1742    else:
1743        is_parallel = True
1744       
1745    # Generate figures
1746    swwfiles = {}
1747   
1748    if is_parallel is True:
1749        base_reportname = reportname
1750        for i in range(nodes):
1751            print 'Sending node %d of %d' %(i,nodes)
1752            swwfiles = {}
1753            if base_reportname is not None:
1754                reportname = base_reportname + '_%s' %(i)
1754            for label_id in production_dirs.keys():
1755                if label_id == 'boundaries':
1756                    swwfile = best_boundary_sww
1757                else:
1758                    file_loc = output_dir + label_id + sep
1759                    sww_extra = '_P%s_%s' %(i,nodes)
1760                    swwfile = file_loc + scenario_name + sww_extra + '.sww'
1761                    print 'swwfile',swwfile
1762                    swwfiles[swwfile] = label_id
1763
1764            texname, elev_output = sww2timeseries(swwfiles,
1765                                          gauges_dir_name,
1766                                          production_dirs,
1767                                          report = report,
1768                                          reportname = reportname,
1769                                          plot_quantity = plot_quantity,
1770                                          generate_fig = generate_fig,
1771                                          surface = surface,
1772                                          time_min = time_min,
1773                                          time_max = time_max,
1774                                          title_on = title_on,
1775                                          verbose = verbose)
1776    else:   
1777        for label_id in production_dirs.keys():       
1778            if label_id == 'boundaries':
1779                print 'boundaries'
1780                file_loc = project.boundaries_in_dir
1781                swwfile = project.boundaries_dir_name3 + '.sww'
1782                #  swwfile = boundary_dir_filename
1783            else:
1784                file_loc = output_dir + label_id + sep
1785                swwfile = file_loc + scenario_name + '.sww'
1786            swwfiles[swwfile] = label_id
1787       
1788        texname, elev_output = sww2timeseries(swwfiles,
1789                                              gauges_dir_name,
1790                                              production_dirs,
1791                                              report = report,
1792                                              reportname = reportname,
1793                                              plot_quantity = plot_quantity,
1794                                              generate_fig = generate_fig,
1795                                              surface = surface,
1796                                              time_min = time_min,
1797                                              time_max = time_max,
1798                                              title_on = title_on,
1799                                              verbose = verbose)
1800                                         
1801
1802   
1803def sww2dem(basename_in, basename_out = None,
1804            quantity = None, # defaults to elevation
1805            timestep = None,
1806            reduction = None,
1807            cellsize = 10,
1808            NODATA_value = -9999,
1809            easting_min = None,
1810            easting_max = None,
1811            northing_min = None,
1812            northing_max = None,
1813            verbose = False,
1814            origin = None,
1815            datum = 'WGS84',
1816            format = 'ers'):
1817
1818    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1819
1820    Example (ASC):
1821
1822    ncols         3121
1823    nrows         1800
1824    xllcorner     722000
1825    yllcorner     5893000
1826    cellsize      25
1827    NODATA_value  -9999
1828    138.3698 137.4194 136.5062 135.5558 ..........
1829
1830    Also write accompanying file with same basename_in but extension .prj
1831    used to fix the UTM zone, datum, false northings and eastings.
1832
1833    The prj format is assumed to be as
1834
1835    Projection    UTM
1836    Zone          56
1837    Datum         WGS84
1838    Zunits        NO
1839    Units         METERS
1840    Spheroid      WGS84
1841    Xshift        0.0000000000
1842    Yshift        10000000.0000000000
1843    Parameters
1844
1845
1846    The parameter quantity must be the name of an existing quantity or
1847    an expression involving existing quantities. The default is
1848    'elevation'. Quantity is not a list of quantities.
1849
1850    If timestep (an index) is given, output quantity at that timestep.
1851
1852    If reduction is given, use it to reduce quantity over all timesteps.
1853
1854    datum is the geodetic datum written to the output header (default 'WGS84').
1855
1856    format can be either 'asc' or 'ers'.
1857    """
1858
1859    import sys
1860    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1861    from Numeric import array2string
1862
1863    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1864    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
1865
1866    msg = 'Format must be either asc or ers'
1867    assert format.lower() in ['asc', 'ers'], msg
1868
1869
1870    false_easting = 500000
1871    false_northing = 10000000
1872
1873    if quantity is None:
1874        quantity = 'elevation'
1875       
1876    if reduction is None:
1877        reduction = max
1878
1879    if basename_out is None:
1880        basename_out = basename_in + '_%s' %quantity
1881
1882    if quantity_formula.has_key(quantity):
1883        quantity = quantity_formula[quantity]
1884       
1885    swwfile = basename_in + '.sww'
1886    demfile = basename_out + '.' + format
1887    # Note: the .ers extension is optional (write_ermapper_grid will
1888    # deal with either option)
1889   
1891   
1892    # Read sww file
1893    if verbose: 
1894        print 'Reading from %s' %swwfile
1895        print 'Output directory is %s' %basename_out
1896   
1897    from Scientific.IO.NetCDF import NetCDFFile
1898    fid = NetCDFFile(swwfile)
1899
1900    #Get extent and reference
1901    x = fid.variables['x'][:]
1902    y = fid.variables['y'][:]
1903    volumes = fid.variables['volumes'][:]
1904    if timestep is not None:
1905        times = fid.variables['time'][timestep]
1906    else:
1907        times = fid.variables['time'][:]
1908
1909    number_of_timesteps = fid.dimensions['number_of_timesteps']
1910    number_of_points = fid.dimensions['number_of_points']
1911   
1912    if origin is None:
1913
1914        # Get geo_reference
1915        # sww files don't have to have a geo_ref
1916        try:
1917            geo_reference = Geo_reference(NetCDFObject=fid)
1918        except AttributeError, e:
1919            geo_reference = Geo_reference() # Default georef object
1920
1921        xllcorner = geo_reference.get_xllcorner()
1922        yllcorner = geo_reference.get_yllcorner()
1923        zone = geo_reference.get_zone()
1924    else:
1925        zone = origin[0]
1926        xllcorner = origin[1]
1927        yllcorner = origin[2]
1928
1929
1930
1931    # FIXME: Refactor using code from Interpolation_function.statistics (in interpolate.py)
1932    # Something like print swwstats(swwname)
1933    if verbose:
1934        print '------------------------------------------------'
1935        print 'Statistics of SWW file:'
1936        print '  Name: %s' %swwfile
1937        print '  Reference:'
1938        print '    Lower left corner: [%f, %f]'\
1939              %(xllcorner, yllcorner)
1940        if timestep is not None:
1941            print '    Time: %f' %(times)
1942        else:
1943            print '    Start time: %f' %fid.starttime[0]
1944        print '  Extent:'
1945        print '    x [m] in [%f, %f], len(x) == %d'\
1946              %(min(x.flat), max(x.flat), len(x.flat))
1947        print '    y [m] in [%f, %f], len(y) == %d'\
1948              %(min(y.flat), max(y.flat), len(y.flat))
1949        if timestep is not None:
1950            print '    t [s] = %f, len(t) == %d' %(times, 1)
1951        else:
1952            print '    t [s] in [%f, %f], len(t) == %d'\
1953              %(min(times), max(times), len(times))
1954        print '  Quantities [SI units]:'
1955        for name in ['stage', 'xmomentum', 'ymomentum']:
1956            q = fid.variables[name][:].flat
1957            if timestep is not None:
1958                q = q[timestep*len(x):(timestep+1)*len(x)]
1959            print '    %s in [%f, %f]' %(name, min(q), max(q))
1960        for name in ['elevation']:
1961            q = fid.variables[name][:].flat
1962            print '    %s in [%f, %f]' %(name, min(q), max(q))
1963           
1964    # Get quantity and reduce if applicable
1965    if verbose: print 'Processing quantity %s' %quantity
1966
1967    # Turn NetCDF objects into Numeric arrays
1968    quantity_dict = {}
1969    for name in fid.variables.keys():
1970        quantity_dict[name] = fid.variables[name][:] 
1971
1972
1973    # Convert quantity expression to quantities found in sww file   
1974    q = apply_expression_to_dictionary(quantity, quantity_dict)
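    # quantity may name a stored variable directly (e.g. 'elevation') or be
    # an expression combining stored variables, e.g. 'stage - elevation'
    # for depth, or '(xmomentum**2 + ymomentum**2)**0.5' for the magnitude
    # of the momentum vector.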
1975
1976    if len(q.shape) == 2:
1977        #q has a time component and needs to be reduced along
1978        #the temporal dimension
1979        if verbose: print 'Reducing quantity %s' %quantity
1980        q_reduced = zeros( number_of_points, Float )
1981       
1982        if timestep is not None:
1983            for k in range(number_of_points):
1984                q_reduced[k] = q[timestep,k] 
1985        else:
1986            for k in range(number_of_points):
1987                q_reduced[k] = reduction( q[:,k] )
1988
1989        q = q_reduced
1990
1991    #Post condition: Now q has dimension: number_of_points
1992    assert len(q.shape) == 1
1993    assert q.shape[0] == number_of_points
1994
1995
1996    if verbose:
1997        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1998
1999
2000    #Create grid and update xll/yll corner and x,y
2001
2002    #Relative extent
2003    if easting_min is None:
2004        xmin = min(x)
2005    else:
2006        xmin = easting_min - xllcorner
2007
2008    if easting_max is None:
2009        xmax = max(x)
2010    else:
2011        xmax = easting_max - xllcorner
2012
2013    if northing_min is None:
2014        ymin = min(y)
2015    else:
2016        ymin = northing_min - yllcorner
2017
2018    if northing_max is None:
2019        ymax = max(y)
2020    else:
2021        ymax = northing_max - yllcorner
2022
2023
2024
2025    if verbose: print 'Creating grid'
2026    ncols = int((xmax-xmin)/cellsize)+1
2027    nrows = int((ymax-ymin)/cellsize)+1
2028
2029
2030    #New absolute reference and coordinates
2031    newxllcorner = xmin+xllcorner
2032    newyllcorner = ymin+yllcorner
2033
2034    x = x+xllcorner-newxllcorner
2035    y = y+yllcorner-newyllcorner
2036   
2037    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2038    assert len(vertex_points.shape) == 2
2039
2040    grid_points = zeros ( (ncols*nrows, 2), Float )
2041
2042
2043    for i in xrange(nrows):
2044        if format.lower() == 'asc':
2045            yg = i*cellsize
2046        else:
2047            # ERS grids are stored top row first, so flip the y values
2048            yg = (nrows-i)*cellsize
2049
2050        for j in xrange(ncols):
2051            xg = j*cellsize
2052            k = i*ncols + j
2053
2054            grid_points[k,0] = xg
2055            grid_points[k,1] = yg
2056
2057    #Interpolate
2058    from anuga.fit_interpolate.interpolate import Interpolate
2059
2060    # Remove loners from vertex_points, volumes here
2061    vertex_points, volumes = remove_lone_verts(vertex_points, volumes)
2064    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2065
2066    #Interpolate using quantity values
2067    if verbose: print 'Interpolating'
2068    grid_values = interp.interpolate(q, grid_points).flat
2069
2070
2071    if verbose:
2072        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2073                                                      max(grid_values))
2074
2075    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2076    P = interp.mesh.get_boundary_polygon()
2077    outside_indices = outside_polygon(grid_points, P, closed=True)
2078
2079    for i in outside_indices:
2080        grid_values[i] = NODATA_value
2081
2082
2083
2084
2085    if format.lower() == 'ers':
2086        # setup ERS header information
2087        grid_values = reshape(grid_values,(nrows, ncols))
2088        header = {}
2089        header['datum'] = '"' + datum + '"'
2090        # FIXME The use of hardwired UTM and zone number needs to be made optional
2091        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2092        header['projection'] = '"UTM-' + str(zone) + '"'
2093        header['coordinatetype'] = 'EN'
2094        if header['coordinatetype'] == 'LL':
2095            header['longitude'] = str(newxllcorner)
2096            header['latitude'] = str(newyllcorner)
2097        elif header['coordinatetype'] == 'EN':
2098            header['eastings'] = str(newxllcorner)
2099            header['northings'] = str(newyllcorner)
2100        header['nullcellvalue'] = str(NODATA_value)
2101        header['xdimension'] = str(cellsize)
2102        header['ydimension'] = str(cellsize)
2103        header['value'] = '"' + quantity + '"'
2104        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2105
2106
2107        #Write
2108        if verbose: print 'Writing %s' %demfile
2109        import ermapper_grids
2110        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2111
2112        fid.close()
2113    else:
2114        #Write to Ascii format
2115
2116        #Write prj file
2117        prjfile = basename_out + '.prj'
2118
2119        if verbose: print 'Writing %s' %prjfile
2120        prjid = open(prjfile, 'w')
2121        prjid.write('Projection    %s\n' %'UTM')
2122        prjid.write('Zone          %d\n' %zone)
2123        prjid.write('Datum         %s\n' %datum)
2124        prjid.write('Zunits        NO\n')
2125        prjid.write('Units         METERS\n')
2126        prjid.write('Spheroid      %s\n' %datum)
2127        prjid.write('Xshift        %d\n' %false_easting)
2128        prjid.write('Yshift        %d\n' %false_northing)
2129        prjid.write('Parameters\n')
2130        prjid.close()
2131
2132
2133
2134        if verbose: print 'Writing %s' %demfile
2135
2136        ascid = open(demfile, 'w')
2137
2138        ascid.write('ncols         %d\n' %ncols)
2139        ascid.write('nrows         %d\n' %nrows)
2140        ascid.write('xllcorner     %d\n' %newxllcorner)
2141        ascid.write('yllcorner     %d\n' %newyllcorner)
2142        ascid.write('cellsize      %f\n' %cellsize)
2143        ascid.write('NODATA_value  %d\n' %NODATA_value)
2144
2145
2149
2150        for i in range(nrows):
2151            if verbose and i%((nrows+10)/10)==0:
2152                print 'Doing row %d of %d' %(i, nrows)
2153
2154            base_index = (nrows-i-1)*ncols
2155
2156            row = grid_values[base_index:base_index+ncols]
2157            s = array2string(row, max_line_width=sys.maxint)
2158            ascid.write(s[1:-1] + '\n')
2159
2160
2167
2168        #Close
2169        ascid.close()
2170        fid.close()
2171
2172        return basename_out
2173
2174#Backwards compatibility
2175def sww2asc(basename_in, basename_out = None,
2176            quantity = None,
2177            timestep = None,
2178            reduction = None,
2179            cellsize = 10,
2180            verbose = False,
2181            origin = None):
2182    print 'sww2asc will soon be obsoleted - please use sww2dem'
2183    sww2dem(basename_in,
2184            basename_out = basename_out,
2185            quantity = quantity,
2186            timestep = timestep,
2187            reduction = reduction,
2188            cellsize = cellsize,
2189            verbose = verbose,
2190            origin = origin,
2191            datum = 'WGS84',
2192            format = 'asc')
2193
2194def sww2ers(basename_in, basename_out = None,
2195            quantity = None,
2196            timestep = None,
2197            reduction = None,
2198            cellsize = 10,
2199            verbose = False,
2200            origin = None,
2201            datum = 'WGS84'):
2202    print 'sww2ers will soon be obsoleted - please use sww2dem'
2203    sww2dem(basename_in,
2204            basename_out = basename_out,
2205            quantity = quantity,
2206            timestep = timestep,
2207            reduction = reduction,
2208            cellsize = cellsize,
2209            verbose = verbose,
2210            origin = origin,
2211            datum = datum,
2212            format = 'ers')
2213################################# END COMPATIBILITY ##############
2214
2215
2216
2217def sww2pts(basename_in, basename_out=None,
2218            data_points=None,
2219            quantity=None,
2220            timestep=None,
2221            reduction=None,
2222            NODATA_value=-9999,
2223            verbose=False,
2224            origin=None):
2226
2227
2228    """Read SWW file and convert to interpolated values at selected points
2229
2230    The parameter 'quantity' must be the name of an existing quantity or
2231    an expression involving existing quantities. The default is
2232    'elevation'.
2233
2234    If timestep (an index) is given, output quantity at that timestep.
2235
2236    If reduction is given, use it to reduce quantity over all timesteps.
2237
2238    data_points (Nx2 array) gives locations of points where quantity is to be computed.
2239   
2240    """
2241
2242    import sys
2243    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2244    from Numeric import array2string
2245
2246    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2247    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
2248
2249    from anuga.geospatial_data.geospatial_data import Geospatial_data
2250
2251    if quantity is None:
2252        quantity = 'elevation'
2253
2254    if reduction is None:
2255        reduction = max
2256
2257    if basename_out is None:
2258        basename_out = basename_in + '_%s' %quantity
2259
2260    swwfile = basename_in + '.sww'
2261    ptsfile = basename_out + '.pts'
2262
2263    # Read sww file
2264    if verbose: print 'Reading from %s' %swwfile
2265    from Scientific.IO.NetCDF import NetCDFFile
2266    fid = NetCDFFile(swwfile)
2267
2268    # Get extent and reference
2269    x = fid.variables['x'][:]
2270    y = fid.variables['y'][:]
2271    volumes = fid.variables['volumes'][:]
2272
2273    number_of_timesteps = fid.dimensions['number_of_timesteps']
2274    number_of_points = fid.dimensions['number_of_points']
2275    if origin is None:
2276
2277        # Get geo_reference
2278        # sww files don't have to have a geo_ref
2279        try:
2280            geo_reference = Geo_reference(NetCDFObject=fid)
2281        except AttributeError, e:
2282            geo_reference = Geo_reference() #Default georef object
2283
2284        xllcorner = geo_reference.get_xllcorner()
2285        yllcorner = geo_reference.get_yllcorner()
2286        zone = geo_reference.get_zone()
2287    else:
2288        zone = origin[0]
2289        xllcorner = origin[1]
2290        yllcorner = origin[2]
2291
2292
2293
2294    # FIXME: Refactor using code from file_function.statistics
2295    # Something like print swwstats(swwname)
2296    if verbose:
2297        x = fid.variables['x'][:]
2298        y = fid.variables['y'][:]
2299        times = fid.variables['time'][:]
2300        print '------------------------------------------------'
2301        print 'Statistics of SWW file:'
2302        print '  Name: %s' %swwfile
2303        print '  Reference:'
2304        print '    Lower left corner: [%f, %f]'\
2305              %(xllcorner, yllcorner)
2306        print '    Start time: %f' %fid.starttime[0]
2307        print '  Extent:'
2308        print '    x [m] in [%f, %f], len(x) == %d'\
2309              %(min(x.flat), max(x.flat), len(x.flat))
2310        print '    y [m] in [%f, %f], len(y) == %d'\
2311              %(min(y.flat), max(y.flat), len(y.flat))
2312        print '    t [s] in [%f, %f], len(t) == %d'\
2313              %(min(times), max(times), len(times))
2314        print '  Quantities [SI units]:'
2315        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2316            q = fid.variables[name][:].flat
2317            print '    %s in [%f, %f]' %(name, min(q), max(q))
2318
2319
2320
2321    # Get quantity and reduce if applicable
2322    if verbose: print 'Processing quantity %s' %quantity
2323
2324    # Turn NetCDF objects into Numeric arrays
2325    quantity_dict = {}
2326    for name in fid.variables.keys():
2327        quantity_dict[name] = fid.variables[name][:]
2328
2329
2330
2331    # Convert quantity expression to quantities found in sww file   
2332    q = apply_expression_to_dictionary(quantity, quantity_dict)
2333
2334
2335
2336    if len(q.shape) == 2:
2337        # q has a time component and needs to be reduced along
2338        # the temporal dimension
2339        if verbose: print 'Reducing quantity %s' %quantity
2340        q_reduced = zeros( number_of_points, Float )
2341
2342        for k in range(number_of_points):
2343            q_reduced[k] = reduction( q[:,k] )
2344
2345        q = q_reduced
2346
2347    # Post condition: Now q has dimension: number_of_points
2348    assert len(q.shape) == 1
2349    assert q.shape[0] == number_of_points
2350
2351
2352    if verbose:
2353        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2354
2355
2356    # Create grid and update xll/yll corner and x,y
2357    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2358    assert len(vertex_points.shape) == 2
2359
2360    # Interpolate
2361    from anuga.fit_interpolate.interpolate import Interpolate
2362    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2363
2364    # Interpolate using quantity values
2365    if verbose: print 'Interpolating'
2366    interpolated_values = interp.interpolate(q, data_points).flat
2367
2368
2369    if verbose:
2370        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2371                                                      max(interpolated_values))
2372
2373    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2374    P = interp.mesh.get_boundary_polygon()
2375    outside_indices = outside_polygon(data_points, P, closed=True)
2376
2377    for i in outside_indices:
2378        interpolated_values[i] = NODATA_value
2379
2380
2381    # Store results   
2382    G = Geospatial_data(data_points=data_points,
2383                        attributes=interpolated_values)
2384
2385    G.export_points_file(ptsfile, absolute = True)
2386
2387    fid.close()
2388
2389
2390def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2391                                  use_cache = False,
2392                                  verbose = False):
2393    """Read Digitial Elevation model from the following ASCII format (.asc)
2394
2395    Example:
2396
2397    ncols         3121
2398    nrows         1800
2399    xllcorner     722000
2400    yllcorner     5893000
2401    cellsize      25
2402    NODATA_value  -9999
2403    138.3698 137.4194 136.5062 135.5558 ..........
2404
2405    Convert basename_in + '.asc' to NetCDF format (.dem)
2406    mimicking the ASCII format closely.
2407
2408
2409    An accompanying file with same basename_in but extension .prj must exist
2410    and is used to fix the UTM zone, datum, false northings and eastings.
2411
2412    The prj format is assumed to be as
2413
2414    Projection    UTM
2415    Zone          56
2416    Datum         WGS84
2417    Zunits        NO
2418    Units         METERS
2419    Spheroid      WGS84
2420    Xshift        0.0000000000
2421    Yshift        10000000.0000000000
2422    Parameters
2423    """
2424
2425
2426
2427    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2428
2429    if use_cache is True:
2430        from caching import cache
2431        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2432                       dependencies = [basename_in + '.asc'],
2433                       verbose = verbose)
2434
2435    else:
2436        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2437
2438    return result
2439
2440
2441
2442
2443
2444
2445def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2446                                  verbose = False):
2447    """Read Digitial Elevation model from the following ASCII format (.asc)
2448
2449    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2450    """
2451
2452    import os
2453    from Scientific.IO.NetCDF import NetCDFFile
2454    from Numeric import Float, array
2455
2456    #root, ext = os.path.splitext(basename_in)
2457    root = basename_in
2458
2459    ###########################################
2460    # Read Meta data
2461    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2462    metadatafile = open(root + '.prj')
2463    metalines = metadatafile.readlines()
2464    metadatafile.close()
2465
2466    L = metalines[0].strip().split()
2467    assert L[0].strip().lower() == 'projection'
2468    projection = L[1].strip()                   #TEXT
2469
2470    L = metalines[1].strip().split()
2471    assert L[0].strip().lower() == 'zone'
2472    zone = int(L[1].strip())
2473
2474    L = metalines[2].strip().split()
2475    assert L[0].strip().lower() == 'datum'
2476    datum = L[1].strip()                        #TEXT
2477
2478    L = metalines[3].strip().split()
2479    assert L[0].strip().lower() == 'zunits'     #IGNORE
2480    zunits = L[1].strip()                       #TEXT
2481
2482    L = metalines[4].strip().split()
2483    assert L[0].strip().lower() == 'units'
2484    units = L[1].strip()                        #TEXT
2485
2486    L = metalines[5].strip().split()
2487    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2488    spheroid = L[1].strip()                     #TEXT
2489
2490    L = metalines[6].strip().split()
2491    assert L[0].strip().lower() == 'xshift'
2492    false_easting = float(L[1].strip())
2493
2494    L = metalines[7].strip().split()
2495    assert L[0].strip().lower() == 'yshift'
2496    false_northing = float(L[1].strip())
2497
2499
2500
2501    ###########################################
2502    #Read DEM data
2503
2504    datafile = open(basename_in + '.asc')
2505
2506    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2507    lines = datafile.readlines()
2508    datafile.close()
2509
2510    if verbose: print 'Got', len(lines), ' lines'
2511
2512    ncols = int(lines[0].split()[1].strip())
2513    nrows = int(lines[1].split()[1].strip())
2514    xllcorner = float(lines[2].split()[1].strip())
2515    yllcorner = float(lines[3].split()[1].strip())
2516    cellsize = float(lines[4].split()[1].strip())
2517    NODATA_value = int(lines[5].split()[1].strip())
2518
2519    assert len(lines) == nrows + 6
2520
2521
2522    ##########################################
2523
2524
2525    if basename_out is None:
2526        netcdfname = root + '.dem'
2527    else:
2528        netcdfname = basename_out + '.dem'
2529
2530    if verbose: print 'Store to NetCDF file %s' %netcdfname
2531    # NetCDF file definition
2532    fid = NetCDFFile(netcdfname, 'w')
2533
2534    #Create new file
2535    fid.institution = 'Geoscience Australia'
2536    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2537                      'of spatial point data'
2538
2539    fid.ncols = ncols
2540    fid.nrows = nrows
2541    fid.xllcorner = xllcorner
2542    fid.yllcorner = yllcorner
2543    fid.cellsize = cellsize
2544    fid.NODATA_value = NODATA_value
2545
2546    fid.zone = zone
2547    fid.false_easting = false_easting
2548    fid.false_northing = false_northing
2549    fid.projection = projection
2550    fid.datum = datum
2551    fid.units = units
2552
2553
2554    # dimension definitions
2555    fid.createDimension('number_of_rows', nrows)
2556    fid.createDimension('number_of_columns', ncols)
2557
2558    # variable definitions
2559    fid.createVariable('elevation', Float, ('number_of_rows',
2560                                            'number_of_columns'))
2561
2562    # Get handles to the variables
2563    elevation = fid.variables['elevation']
2564
2565    #Store data
2566    n = len(lines[6:])
2567    for i, line in enumerate(lines[6:]):
2568        fields = line.split()
2569        if verbose and i%((n+10)/10)==0:
2570            print 'Processing row %d of %d' %(i, nrows)
2571
2572        elevation[i, :] = array([float(x) for x in fields])
2573
2574    fid.close()
2575
2576
2577
2578
2579
2580def ferret2sww(basename_in, basename_out = None,
2581               verbose = False,
2582               minlat = None, maxlat = None,
2583               minlon = None, maxlon = None,
2584               mint = None, maxt = None, mean_stage = 0,
2585               origin = None, zscale = 1,
2586               fail_on_NaN = True,
2587               NaN_filler = 0,
2588               elevation = None,
2589               inverted_bathymetry = True
2590               ): #FIXME: Bathymetry should be obtained
2591                                  #from MOST somehow.
2592                                  #Alternatively from elsewhere
2593                                  #or, as a last resort,
2594                                  #specified here.
2595                                  #The value of -100 will work
2596                                  #for the Wollongong tsunami
2597                                  #scenario but is very hacky
2598    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2599    sww format native to abstract_2d_finite_volumes.
2600
2601    Specify only basename_in and read files of the form
2602    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2603    relative height, x-velocity and y-velocity, respectively.
2604
2605    Also convert latitude and longitude to UTM. All coordinates are
2606    assumed to be given in the GDA94 datum.
2607
2608    min's and max's: If omitted, the full extent is used.
2609    To include a value, min may equal it, while max must exceed it.
2610    Lat and lon are assumed to be in decimal degrees.
2611
2612    origin is a 3-tuple with geo referenced
2613    UTM coordinates (zone, easting, northing)
2614
2615    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2616    which means that longitude is the fastest
2617    varying dimension (row major order, so to speak)
2618
2619    ferret2sww uses grid points as vertices in a triangular grid
2620    counting vertices from lower left corner upwards, then right
2621    """
2622
2623    import os
2624    from Scientific.IO.NetCDF import NetCDFFile
2625    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2626    from Numeric import allclose, around
2627
2628    precision = Float
2629
2630    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2631
2632    if minlat is not None:
2633        assert -90 < minlat < 90, msg
2634    if maxlat is not None:
2635        assert -90 < maxlat < 90, msg
2636        if minlat is not None:
2637            assert maxlat > minlat
2638    if minlon is not None:
2639        assert -180 < minlon < 180, msg
2640    if maxlon is not None:
2641        assert -180 < maxlon < 180, msg
2642        if minlon is not None:
2643            assert maxlon > minlon
2644       
2645
2646
2647    #Get NetCDF data
2648    if verbose: print 'Reading files %s_*.nc' %basename_in
2650    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2651    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2652    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2653    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2654
2655    if basename_out is None:
2656        swwname = basename_in + '.sww'
2657    else:
2658        swwname = basename_out + '.sww'
2659
2660    # Get dimensions of file_h
2661    for dimension in file_h.dimensions.keys():
2662        if dimension[:3] == 'LON':
2663            dim_h_longitude = dimension
2664        if dimension[:3] == 'LAT':
2665            dim_h_latitude = dimension
2666        if dimension[:4] == 'TIME':
2667            dim_h_time = dimension
2668
2672
2673    times = file_h.variables[dim_h_time]
2674    latitudes = file_h.variables[dim_h_latitude]
2675    longitudes = file_h.variables[dim_h_longitude]
2676   
2677    # get dimensions for file_e
2678    for dimension in file_e.dimensions.keys():
2679        if dimension[:3] == 'LON':
2680            dim_e_longitude = dimension
2681        if dimension[:3] == 'LAT':
2682            dim_e_latitude = dimension
2683
2684    # get dimensions for file_u
2685    for dimension in file_u.dimensions.keys():
2686        if dimension[:3] == 'LON':
2687            dim_u_longitude = dimension
2688        if dimension[:3] == 'LAT':
2689            dim_u_latitude = dimension
2690        if dimension[:4] == 'TIME':
2691            dim_u_time = dimension
2692           
2693    # get dimensions for file_v
2694    for dimension in file_v.dimensions.keys():
2695        if dimension[:3] == 'LON':
2696            dim_v_longitude = dimension
2697        if dimension[:3] == 'LAT':
2698            dim_v_latitude = dimension
2699        if dimension[:4] == 'TIME':
2700            dim_v_time = dimension
2701
2702
2703    # Precision used by most for lat/lon is 4 or 5 decimals
2704    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2705    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2706
2707    # Check that files are compatible
2708    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2709    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2710    assert allclose(latitudes, e_lat)
2711
2712    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2713    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2714    assert allclose(longitudes, e_lon)
2715
2716    if mint is None:
2717        jmin = 0
2718        mint = times[0]       
2719    else:
2720        jmin = searchsorted(times, mint)
2721       
2722    if maxt is None:
2723        jmax = len(times)
2724        maxt = times[-1]
2725    else:
2726        jmax = searchsorted(times, maxt)
2727
2728    #print "latitudes[:]",latitudes[:]
2729    #print "longitudes[:]",longitudes [:]
2730    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
2731                                                  longitudes[:],
2732                                                  minlat, maxlat,
2733                                                  minlon, maxlon)
2734
2735
2736    times = times[jmin:jmax]
2737    latitudes = latitudes[kmin:kmax]
2738    longitudes = longitudes[lmin:lmax]
2739
2740    #print "latitudes[:]",latitudes[:]
2741    #print "longitudes[:]",longitudes [:]
2742
2743    if verbose: print 'cropping'
2744    zname = 'ELEVATION'
2745
2746    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2747    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2748    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2749    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2750
2764
2765
2766
2767    #Get missing values
2768    nan_ha = file_h.variables['HA'].missing_value[0]
2769    nan_ua = file_u.variables['UA'].missing_value[0]
2770    nan_va = file_v.variables['VA'].missing_value[0]
2771    if hasattr(file_e.variables[zname],'missing_value'):
2772        nan_e  = file_e.variables[zname].missing_value[0]
2773    else:
2774        nan_e = None
2775
2776    #Cleanup
2777    from Numeric import sometrue
2778
2779    missing = (amplitudes == nan_ha)
2780    if sometrue (missing):
2781        if fail_on_NaN:
2782            msg = 'NetCDFFile %s contains missing values'\
2783                  %(basename_in+'_ha.nc')
2784            raise DataMissingValuesError, msg
2785        else:
2786            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2787
2788    missing = (uspeed == nan_ua)
2789    if sometrue (missing):
2790        if fail_on_NaN:
2791            msg = 'NetCDFFile %s contains missing values'\
2792                  %(basename_in+'_ua.nc')
2793            raise DataMissingValuesError, msg
2794        else:
2795            uspeed = uspeed*(missing==0) + missing*NaN_filler
2796
2797    missing = (vspeed == nan_va)
2798    if sometrue (missing):
2799        if fail_on_NaN:
2800            msg = 'NetCDFFile %s contains missing values'\
2801                  %(basename_in+'_va.nc')
2802            raise DataMissingValuesError, msg
2803        else:
2804            vspeed = vspeed*(missing==0) + missing*NaN_filler
2805
2806
2807    missing = (elevations == nan_e)
2808    if sometrue (missing):
2809        if fail_on_NaN:
2810            msg = 'NetCDFFile %s contains missing values'\
2811                  %(basename_in+'_e.nc')
2812            raise DataMissingValuesError, msg
2813        else:
2814            elevations = elevations*(missing==0) + missing*NaN_filler
2815
2816    #######
2817
2818
2819
2820    number_of_times = times.shape[0]
2821    number_of_latitudes = latitudes.shape[0]
2822    number_of_longitudes = longitudes.shape[0]
2823
2824    assert amplitudes.shape[0] == number_of_times
2825    assert amplitudes.shape[1] == number_of_latitudes
2826    assert amplitudes.shape[2] == number_of_longitudes
2827
2828    if verbose:
2829        print '------------------------------------------------'
2830        print 'Statistics:'
2831        print '  Extent (lat/lon):'
2832        print '    lat in [%f, %f], len(lat) == %d'\
2833              %(min(latitudes.flat), max(latitudes.flat),
2834                len(latitudes.flat))
2835        print '    lon in [%f, %f], len(lon) == %d'\
2836              %(min(longitudes.flat), max(longitudes.flat),
2837                len(longitudes.flat))
2838        print '    t in [%f, %f], len(t) == %d'\
2839              %(min(times.flat), max(times.flat), len(times.flat))
2840
2841        q = amplitudes.flat
2842        name = 'Amplitudes (ha) [cm]'
2843        print '    %s in [%f, %f]' %(name, min(q), max(q))
2844
2845        q = uspeed.flat
2846        name = 'Speeds (ua) [cm/s]'
2847        print '    %s in [%f, %f]' %(name, min(q), max(q))
2848
2849        q = vspeed.flat
2850        name = 'Speeds (va) [cm/s]'
2851        print '    %s in [%f, %f]' %(name, min(q), max(q))
2852
2853        q = elevations.flat
2854        name = 'Elevations (e) [m]'
2855        print '    %s in [%f, %f]' %(name, min(q), max(q))
2856
2857
2859    number_of_points = number_of_latitudes*number_of_longitudes
2860    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2861
2862
2863    file_h.close()
2864    file_u.close()
2865    file_v.close()
2866    file_e.close()
2867
2868
2869    # NetCDF file definition
2870    outfile = NetCDFFile(swwname, 'w')
2871
2872    description = 'Converted from Ferret files: %s, %s, %s, %s'\
2873                  %(basename_in + '_ha.nc',
2874                    basename_in + '_ua.nc',
2875                    basename_in + '_va.nc',
2876                    basename_in + '_e.nc')
2877   
2878    #Create new file
2879    starttime = times[0]
2880    sww = Write_sww()
2881    sww.header(outfile, times, number_of_volumes,
2882                         number_of_points, description=description,
2883                         verbose=verbose)
2884
2885    #Store
2886    from anuga.coordinate_transforms.redfearn import redfearn
2887    x = zeros(number_of_points, Float)  #Easting
2888    y = zeros(number_of_points, Float)  #Northing
2889
2890
2891    if verbose: print 'Making triangular grid'
2892    #Check zone boundaries
2893    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2894
2895    vertices = {}
2896    i = 0
2897    for k, lat in enumerate(latitudes):       #Y direction
2898        for l, lon in enumerate(longitudes):  #X direction
2899
2900            vertices[l,k] = i
2901
2902            zone, easting, northing = redfearn(lat,lon)
2903
2904            msg = 'Zone boundary crossed at longitude = %f' %lon
2905            #assert zone == refzone, msg
2907            x[i] = easting
2908            y[i] = northing
2909            i += 1
2910
2911    #Construct 2 triangles per 'rectangular' element
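    # A minimal sketch of the vertex layout for one cell (indices into
    # the vertices dictionary built above):
    #
    #   v1 = (l, k+1)      v3 = (l+1, k+1)
    #        +--------------+
    #        |  upper     / |
    #        |          /   |
    #        |        /     |
    #        |      /       |
    #        |    /   lower |
    #        +--------------+
    #   v2 = (l, k)        v4 = (l+1, k)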
2912    volumes = []
2913    for l in range(number_of_longitudes-1):    #X direction
2914        for k in range(number_of_latitudes-1): #Y direction
2915            v1 = vertices[l,k+1]
2916            v2 = vertices[l,k]
2917            v3 = vertices[l+1,k+1]
2918            v4 = vertices[l+1,k]
2919
2920            volumes.append([v1,v2,v3]) #Upper element
2921            volumes.append([v4,v3,v2]) #Lower element
2922
2923    volumes = array(volumes)
2924
2925    if origin is None:
2926        origin = Geo_reference(refzone,min(x),min(y))
2927    geo_ref = write_NetCDF_georeference(origin, outfile)
2928   
2929    if elevation is not None:
2930        z = elevation
2931    else:
2932        if inverted_bathymetry:
2933            z = -1*elevations
2934        else:
2935            z = elevations
2936    #FIXME: z should be obtained from MOST and passed in here
2937
2938    from Numeric import resize
2939    z = resize(z,outfile.variables['z'][:].shape)
2940    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
2941    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
2942    outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
2943    outfile.variables['elevation'][:] = z
2944    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
2945
2946
2947
2948    #Time stepping
2949    stage = outfile.variables['stage']
2950    xmomentum = outfile.variables['xmomentum']
2951    ymomentum = outfile.variables['ymomentum']
2952
2953    if verbose: print 'Converting quantities'
2954    n = len(times)
2955    for j in range(n):
2956        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2957        i = 0
2958        for k in range(number_of_latitudes):      #Y direction
2959            for l in range(number_of_longitudes): #X direction
2960                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2961                stage[j,i] = w
2962                h = w - z[i]
2963                xmomentum[j,i] = uspeed[j,k,l]/100*h
2964                ymomentum[j,i] = vspeed[j,k,l]/100*h
2965                i += 1
2966
2968
2969    #FIXME: Refactor using code from file_function.statistics
2970    #Something like print swwstats(swwname)
2971    if verbose:
2972        x = outfile.variables['x'][:]
2973        y = outfile.variables['y'][:]
2974        print '------------------------------------------------'
2975        print 'Statistics of output file:'
2976        print '  Name: %s' %swwname
2977        print '  Reference:'
2978        print '    Lower left corner: [%f, %f]'\
2979              %(geo_ref.get_xllcorner(), geo_ref.get_yllcorner())
2980        print '    Start time: %f' %starttime
2981        print '    Min time: %f' %mint
2982        print '    Max time: %f' %maxt
2983        print '  Extent:'
2984        print '    x [m] in [%f, %f], len(x) == %d'\
2985              %(min(x.flat), max(x.flat), len(x.flat))
2986        print '    y [m] in [%f, %f], len(y) == %d'\
2987              %(min(y.flat), max(y.flat), len(y.flat))
2988        print '    t [s] in [%f, %f], len(t) == %d'\
2989              %(min(times), max(times), len(times))
2990        print '  Quantities [SI units]:'
2991        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2992            q = outfile.variables[name][:].flat
2993            print '    %s in [%f, %f]' %(name, min(q), max(q))
2994
2995
2996
2997    outfile.close()
2998
2999
3000
3001
3002
3003def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False):
3004    """Template for converting typical text files with time series to
3005    NetCDF tms file.
3006
3007
3008    The file format is assumed to be two fields separated by a comma:
3009
3010        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3011
3012    E.g.
3013
3014      31/08/04 00:00:00, 1.328223 0 0
3015      31/08/04 00:15:00, 1.292912 0 0
3016
3017    or, with time in seconds (time_as_seconds=True),
3018
3019      0.0, 1.328223 0 0
3020      0.1, 1.292912 0 0
3021
3022    Either form provides a time dependent function f(t) with, here, three attributes.
3023
3024    filename is assumed to be the rootname with extensions .txt (input) and .tms (output)
3025    """
3026
3027    import time, calendar
3028    from Numeric import array
3029    from anuga.config import time_format
3030    from anuga.utilities.numerical_tools import ensure_numeric
3031
3032
3033
3034    fid = open(filename + '.txt')
3035    line = fid.readline()
3036    fid.close()
3037
3038    fields = line.split(',')
3039    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3040    assert len(fields) == 2, msg
3041
3042    if not time_as_seconds:
3043        try:
3044            starttime = calendar.timegm(time.strptime(fields[0], time_format))
3045        except ValueError:
3046            msg = 'First field in file %s must be' %filename
3047            msg += ' date-time with format %s.\n' %time_format
3048            msg += 'I got %s instead.' %fields[0]
3049            raise DataTimeError, msg
3050    else:
3051        try:
3052            starttime = float(fields[0])
3053        except ValueError:
3054            msg = 'First field in file %s must be a number when time_as_seconds is True' %filename
3055            raise DataTimeError, msg
3056
3057
3058    #Split values
3059    values = []
3060    for value in fields[1].split():
3061        values.append(float(value))
3062
3063    q = ensure_numeric(values)
3064
3065    msg = 'ERROR: File must contain at least one independent value'
3066    assert len(q.shape) == 1, msg
3067
3068
3069
3070    #Read times proper
3071    from Numeric import zeros, Float, alltrue
3072    from anuga.config import time_format
3073    import time, calendar
3074
3075    fid = open(filename + '.txt')
3076    lines = fid.readlines()
3077    fid.close()
3078
3079    N = len(lines)
3080    d = len(q)
3081
3082    T = zeros(N, Float)       #Time
3083    Q = zeros((N, d), Float)  #Values
3084
3085    for i, line in enumerate(lines):
3086        fields = line.split(',')
3087        if not time_as_seconds:
3088            realtime = calendar.timegm(time.strptime(fields[0], time_format))
3089        else:
3090            realtime = float(fields[0])
3091        T[i] = realtime - starttime
3092
3093        for j, value in enumerate(fields[1].split()):
3094            Q[i, j] = float(value)
3095
3096    msg = 'File %s must list time as a monotonically ' %filename
3097    msg += 'increasing sequence'
3098    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3099
3100    #Create NetCDF file
3101    from Scientific.IO.NetCDF import NetCDFFile
3102
3103    fid = NetCDFFile(filename + '.tms', 'w')
3104
3105
3106    fid.institution = 'Geoscience Australia'
3107    fid.description = 'Time series'
3108
3109
3110    #Reference point
3111    #Start time in seconds since the epoch (midnight 1/1/1970)
3112    #FIXME: Use Georef
3113    fid.starttime = starttime
3114
3115    # dimension definitions
3116    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3117    #fid.createDimension('number_of_vertices', 3)
3118
3119
3120    fid.createDimension('number_of_timesteps', len(T))
3121
3122    fid.createVariable('time', Float, ('number_of_timesteps',))
3123
3124    fid.variables['time'][:] = T
3125
3126    for i in range(Q.shape[1]):
3127        try:
3128            name = quantity_names[i]
3129        except (TypeError, IndexError):
3130            name = 'Attribute%d'%i
3131
3132        fid.createVariable(name, Float, ('number_of_timesteps',))
3133        fid.variables[name][:] = Q[:,i]
3134
3135    fid.close()
3136
3137
3138def extent_sww(file_name):
3139    """
3140    Read in an sww file.
3141
3142    Input;
3143    file_name - the sww file
3144
3145    Output;
3146    z - Vector of bed elevation
3147    volumes - Array.  Each row has 3 values, representing
3148    the vertices that define the volume
3149    time - Vector of the times where there is stage information
3150    stage - array with respect to time and vertices (x,y)
3151    """
3152
3153
3154    from Scientific.IO.NetCDF import NetCDFFile
3155
3156    #Check contents
3157    #Get NetCDF
3158    fid = NetCDFFile(file_name, 'r')
3159
3160    # Get the variables
3161    x = fid.variables['x'][:]
3162    y = fid.variables['y'][:]
3163    stage = fid.variables['stage'][:]
3164    #print "stage",stage
3165    #print "stage.shap",stage.shape
3166    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3167    #print "min(stage)",min(stage)
3168
3169    fid.close()
3170
3171    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3172
3173
3174def sww2domain(filename, boundary=None, t=None,
3175               fail_if_NaN=True, NaN_filler=0,
3176               verbose=False, very_verbose=False):
3177    """
3178    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3179
3180    Boundary is not recommended if domain.smooth is not selected, as it
3181    uses unique coordinates, but not unique boundaries. This means that
3182    the boundary file will not be compatable with the coordinates, and will
3183    give a different final boundary, or crash.
3184    """
3185    NaN = 9.969209968386869e+036  # NetCDF default fill value, used as NaN
3187
3188    from Scientific.IO.NetCDF import NetCDFFile
3189    from shallow_water import Domain
3190    from Numeric import asarray, transpose, resize
3191
3192    if verbose: print 'Reading from ', filename
3193    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3194    time = fid.variables['time']       #Timesteps
3195    if t is None:
3196        t = time[-1]
3197    time_interp = get_time_interp(time,t)
3198
3199    # Get the variables as Numeric arrays
3200    x = fid.variables['x'][:]             #x-coordinates of vertices
3201    y = fid.variables['y'][:]             #y-coordinates of vertices
3202    elevation = fid.variables['elevation']     #Elevation
3203    stage = fid.variables['stage']     #Water level
3204    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3205    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3206
3207    starttime = fid.starttime[0]
3208    volumes = fid.variables['volumes'][:] #Connectivity
3209    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3210
3211    conserved_quantities = []
3212    interpolated_quantities = {}
3213    other_quantities = []
3214
3215    # get geo_reference
3216    #sww files don't have to have a geo_ref
3217    try:
3218        geo_reference = Geo_reference(NetCDFObject=fid)
3219    except: #AttributeError, e:
3220        geo_reference = None
3221
3222    if verbose: print '    getting quantities'
3223    for quantity in fid.variables.keys():
3224        dimensions = fid.variables[quantity].dimensions
3225        if 'number_of_timesteps' in dimensions:
3226            conserved_quantities.append(quantity)
3227            interpolated_quantities[quantity]=\
3228                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3229        else: other_quantities.append(quantity)
3230
3231    other_quantities.remove('x')
3232    other_quantities.remove('y')
3233    other_quantities.remove('z')
3234    other_quantities.remove('volumes')
3235    try:
3236        other_quantities.remove('stage_range')
3237        other_quantities.remove('xmomentum_range')
3238        other_quantities.remove('ymomentum_range')
3239        other_quantities.remove('elevation_range')
3240    except:
3241        pass
3242       
3243
3244    conserved_quantities.remove('time')
3245
3246    if verbose: print '    building domain'
3247    #    From domain.Domain:
3248    #    domain = Domain(coordinates, volumes,\
3249    #                    conserved_quantities = conserved_quantities,\
3250    #                    other_quantities = other_quantities,zone=zone,\
3251    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3252
3253    #   From shallow_water.Domain:
3254    coordinates=coordinates.tolist()
3255    volumes=volumes.tolist()
3256    #FIXME:should this be in mesh?(peter row)
3257    if fid.smoothing == 'Yes': unique = False
3258    else: unique = True
3259    if unique:
3260        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3261
3262
3263    try:
3264        domain = Domain(coordinates, volumes, boundary)
3265    except AssertionError, e:
3266        fid.close()
3267        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3268        raise DataDomainError, msg
3269
3270    if not boundary is None:
3271        domain.boundary = boundary
3272
3273    domain.geo_reference = geo_reference
3274
3275    domain.starttime=float(starttime)+float(t)
3276    domain.time=0.0
3277
3278    for quantity in other_quantities:
3279        try:
3280            NaN = fid.variables[quantity].missing_value
3281        except:
3282            pass #quantity has no missing_value number
3283        X = fid.variables[quantity][:]
3284        if very_verbose:
3285            print '       ',quantity
3286            print '        NaN =',NaN
3287            print '        max(X)'
3288            print '       ',max(X)
3289            print '        max(X)==NaN'
3290            print '       ',max(X)==NaN
3291            print ''
3292        if (max(X)==NaN) or (min(X)==NaN):
3293            if fail_if_NaN:
3294                msg = 'quantity "%s" contains no_data entry'%quantity
3295                raise DataMissingValuesError, msg
3296            else:
3297                data = (X<>NaN)
3298                X = (X*data)+(data==0)*NaN_filler
3299        if unique:
3300            X = resize(X,(len(X)/3,3))
3301        domain.set_quantity(quantity,X)
3302    #
3303    for quantity in conserved_quantities:
3304        try:
3305            NaN = fid.variables[quantity].missing_value
3306        except:
3307            pass #quantity has no missing_value number
3308        X = interpolated_quantities[quantity]
3309        if very_verbose:
3310            print '       ',quantity
3311            print '        NaN =',NaN
3312            print '        max(X)'
3313            print '       ',max(X)
3314            print '        max(X)==NaN'
3315            print '       ',max(X)==NaN
3316            print ''
3317        if (max(X)==NaN) or (min(X)==NaN):
3318            if fail_if_NaN:
3319                msg = 'quantity "%s" contains no_data entry'%quantity
3320                raise DataMissingValuesError, msg
3321            else:
3322                data = (X<>NaN)
3323                X = (X*data)+(data==0)*NaN_filler
3324        if unique:
3325            X = resize(X,(X.shape[0]/3,3))
3326        domain.set_quantity(quantity,X)
3327
3328    fid.close()
3329    return domain
3330
def interpolated_quantity(saved_quantity, time_interp):

    #Given an index and ratio, interpolate quantity with respect to time.
    index, ratio = time_interp
    Q = saved_quantity
    if ratio > 0:
        q = (1-ratio)*Q[index] + ratio*Q[index+1]
    else:
        q = Q[index]
    #Return vector of interpolated values
    return q

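# Example (a minimal sketch): with time_interp == (2, 0.25) the result is
# 0.75*Q[2] + 0.25*Q[3], i.e. linear interpolation a quarter of the way
# between timesteps 2 and 3.
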
def get_time_interp(time, t=None):
    #Finds the ratio and index for time interpolation.
    #It is borrowed from previous abstract_2d_finite_volumes code.
    if t is None:
        t = time[-1]
        index = -1
        ratio = 0.
    else:
        T = time
        tau = t
        index = 0
        msg = 'Time interval derived from file %s [%s:%s]'\
            %('FIXMEfilename', T[0], T[-1])
        msg += ' does not match model time: %s' %tau
        if tau < time[0]: raise DataTimeError, msg
        if tau > time[-1]: raise DataTimeError, msg
        while tau > time[index]: index += 1
        while tau < time[index]: index -= 1
        if tau == time[index]:
            #Protect against case where tau == time[-1] (last time)
            # - also works in general when tau == time[i]
            ratio = 0
        else:
            #t is now between index and index+1
            ratio = (tau - time[index])/(time[index+1] - time[index])
    return (index, ratio)


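# Example (a minimal sketch):
#
#     index, ratio = get_time_interp([0.0, 10.0, 20.0], t=15.0)
#     # index == 1, ratio == 0.5
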
def weed(coordinates, volumes, boundary=None):
    if type(coordinates)==ArrayType:
        coordinates = coordinates.tolist()
    if type(volumes)==ArrayType:
        volumes = volumes.tolist()

    unique = False
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point_dict.has_key(point):
            unique = True
            same_point[i]=point
            #to change all point i references to point j
        else:
            point_dict[point]=i
            same_point[i]=point

    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point]=i
        i+=1


    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index>-1:
                volume[i]=point_dict[same_point[index]]

    new_boundary = {}
    if not boundary is None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME: should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')? (peter row)

            if new_boundary.has_key((point0,point1)):
                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
                                              #+','+label

            elif new_boundary.has_key((point1,point0)):
                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
                                              #+','+label
            else: new_boundary[(point0,point1)]=label

        boundary = new_boundary

    return coordinates, volumes, boundary


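# Example (a minimal sketch): two triangles listing duplicated vertices
# are mapped onto a unique coordinate list.
#
#     coords = [[0,0], [1,0], [0,1], [0,0], [1,0], [1,1]]
#     vols = [[0,1,2], [3,4,5]]
#     coords, vols, _ = weed(coords, vols)
#     # len(coords) == 4 and both volumes now index the shared points
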
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out == None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                          'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev = zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()



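# Example (a minimal sketch; 'topo' is a hypothetical DEM basename with
# cellsize 25 m): decimating to 75 m with a uniform 3x3 averaging stencil,
# whose weights sum to 1 so each output cell is the mean of a 3x3 block.
#
#     from Numeric import ones, Float
#     stencil = ones((3, 3), Float) / 9.0
#     decimate_dem('topo', stencil, cellsize_new=75)
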
def tsh2sww(filename, verbose=False): #test_tsh2sww
    """
    Check whether a tsh/msh file 'looks' good by converting it to an
    sww file.
    """


    if verbose == True: print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose == True: print "Number of triangles = ", len(domain)

    domain.smooth = True
    domain.format = 'sww'   #Native netcdf visualisation format
    file_path, filename = path.split(filename)
    filename, ext = path.splitext(filename)
    domain.set_name(filename)
    domain.reduction = mean
    if verbose == True: print "file_path", file_path
    if file_path == "": file_path = "."
    domain.set_datadir(file_path)

    if verbose == True:
        print "Output written to " + domain.get_datadir() + sep + \
              domain.get_name() + "." + domain.format
    sww = get_dataobject(domain)
    sww.store_connectivity()
    sww.store_timestep('stage')


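# Example (a minimal sketch; 'channel.tsh' is a hypothetical mesh file):
#
#     tsh2sww('channel.tsh', verbose=True)
#     # writes channel.sww to the directory containing the mesh
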
def asc_csiro2sww(bath_dir,
                  elevation_dir,
                  ucur_dir,
                  vcur_dir,
                  sww_file,
                  minlat = None, maxlat = None,
                  minlon = None, maxlon = None,
                  zscale=1,
                  mean_stage = 0,
                  fail_on_NaN = True,
                  elevation_NaN_filler = 0,
                  bath_prefix='ba',
                  elevation_prefix='el',
                  verbose=False):
    """
    Produce an sww boundary file, from esri ascii data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    assume:
    All files are in esri ascii format

    4 types of information
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions
    The metadata of all the files is the same
    Each type is in a separate directory
    One bath file with extension .000
    The time period is less than 24hrs and uniform.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from anuga.coordinate_transforms.redfearn import redfearn

    precision = Float # So if we want to change the precision it's done here

    # go in to the bath dir and load the only file,
    bath_files = os.listdir(bath_dir)

    bath_file = bath_files[0]
    bath_dir_file = bath_dir + os.sep + bath_file
    bath_metadata, bath_grid = _read_asc(bath_dir_file)

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    base_start = bath_file[-12:]

    #go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir + os.sep + elevation_prefix \
                         + base_start

    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)
    elevation_files.sort()
    # the first elevation file should be the
    # file with the same base name as the bath data
    assert elevation_files[0] == 'el' + base_start

    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
                  for x in range(number_of_longitudes)]
    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
                 for y in range(number_of_latitudes)]

    # reverse order of lat, so the first lat represents the first grid row
    latitudes.reverse()

    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:], longitudes[:],
                                                  minlat=minlat, maxlat=maxlat,
                                                  minlon=minlon, maxlon=maxlon)


    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    #Work out the times
    if len(elevation_files) > 1:
        # Assume: The time period is less than 24hrs.
        time_period = (int(elevation_files[1][-3:]) - \
                       int(elevation_files[0][-3:]))*60*60
        times = [x*time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]


    if verbose:
        print '------------------------------------------------'
        print 'Statistics:'
        print '  Extent (lat/lon):'
        print '    lat in [%f, %f], len(lat) == %d'\
              %(min(latitudes), max(latitudes),
                len(latitudes))
        print '    lon in [%f, %f], len(lon) == %d'\
              %(min(longitudes), max(longitudes),
                len(longitudes))
        print '    t in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))

    ######### WRITE THE SWW FILE #############
    # NetCDF file definition
    outfile = NetCDFFile(sww_file, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'


    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    #Start time in seconds since the epoch (midnight 1/1/1970)
    outfile.starttime = starttime = times[0]


    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)

    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    #Store
    from anuga.coordinate_transforms.redfearn import redfearn
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print 'Making triangular grid'
    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat, lon)

            msg = 'Zone boundary crossed at longitude = %s' %lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1


    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            #Note, this is different to the ferret2sww code
            #since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) #Upper element
            volumes.append([v4,v2,v3]) #Lower element

    volumes = array(volumes)

    geo_ref = Geo_reference(refzone, min(x), min(y))
    geo_ref.write_NetCDF(outfile)

    # This will put the geo ref in the middle
    #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2.0, (max(y)+min(y))/2.)


    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (x/y):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y),
                len(y))
        print 'geo_ref: ', geo_ref

    z = resize(bath_grid, outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
            _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])

        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]

        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue (missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                raise DataMissingValuesError, msg
            else:
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler


        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()

def _get_min_max_indexes(latitudes_ref, longitudes_ref,
                         minlat=None, maxlat=None,
                         minlon=None, maxlon=None):
    """
    Return min, max indexes (for slicing) of the lat and long arrays to
    cover the area specified with min/max lat/long.

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    assert longitudes are sorted,
    long - from low to high (west to east, eg 148 - 151)
    assert latitudes are sorted, ascending or descending
    """
    latitudes = latitudes_ref[:]
    longitudes = longitudes_ref[:]

    latitudes = ensure_numeric(latitudes)
    longitudes = ensure_numeric(longitudes)

    assert allclose(sort(longitudes), longitudes)

    lat_ascending = True
    if not allclose(sort(latitudes), latitudes):
        lat_ascending = False
        # reverse order of lat, so it's in ascending order
        latitudes = latitudes[::-1]
        assert allclose(sort(latitudes), latitudes)
    #print "latitudes in funct", latitudes

    largest_lat_index = len(latitudes)-1
    #Cut out a smaller extent.
    if minlat == None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0


    if maxlat == None:
        lat_max_index = largest_lat_index #len(latitudes)
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon == None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon == None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)

    # Reversing the indexes, if the lat array is descending
    if lat_ascending is False:
        lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
                                       largest_lat_index - lat_min_index
    lat_max_index = lat_max_index + 1 # taking into account how slicing works
    lon_max_index = lon_max_index + 1 # taking into account how slicing works

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index


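# Example (a minimal sketch): with latitudes [-21, -20, -19] (ascending)
# and longitudes [114, 115, 116],
#
#     _get_min_max_indexes([-21, -20, -19], [114, 115, 116],
#                          minlat=-20.5, maxlat=-19.5,
#                          minlon=114.5, maxlon=115.5)
#
# returns (0, 3, 0, 3): slicing [0:3] keeps the neighbouring rows and
# columns needed to fully cover the requested area.
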
def _read_asc(filename, verbose=False):
    """Read esri file from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    """

    datafile = open(filename)

    if verbose: print 'Reading DEM from %s' %(filename)
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), ' lines'

    ncols = int(lines.pop(0).split()[1].strip())
    nrows = int(lines.pop(0).split()[1].strip())
    xllcorner = float(lines.pop(0).split()[1].strip())
    yllcorner = float(lines.pop(0).split()[1].strip())
    cellsize = float(lines.pop(0).split()[1].strip())
    NODATA_value = float(lines.pop(0).split()[1].strip())

    assert len(lines) == nrows

    #Store data
    grid = []

    n = len(lines)
    for i, line in enumerate(lines):
        cells = line.split()
        assert len(cells) == ncols
        grid.append(array([float(x) for x in cells]))
    grid = array(grid)

    return {'xllcorner':xllcorner,
            'yllcorner':yllcorner,
            'cellsize':cellsize,
            'NODATA_value':NODATA_value}, grid


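# Example (a minimal sketch; 'bathy.asc' is a hypothetical ESRI ASCII grid):
#
#     metadata, grid = _read_asc('bathy.asc')
#     cellsize = metadata['cellsize']
#     # grid.shape == (nrows, ncols)
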
    ####  URS 2 SWW  ###

lon_name = 'LON'
lat_name = 'LAT'
time_name = 'TIME'
precision = Float # So if we want to change the precision it's done here
class Write_nc:
    """
    Write an nc file.

    Note, this should be checked to meet cdc netcdf conventions for gridded
    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml

    """
    def __init__(self,
                 quantity_name,
                 file_name,
                 time_step_count,
                 time_step,
                 lon,
                 lat):
        """
        time_step_count is the number of time steps.
        time_step is the time step size

        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
        """
        self.quantity_name = quantity_name
        quantity_units = {'HA':'CENTIMETERS',
                          'UA':'CENTIMETERS/SECOND',
                          'VA':'CENTIMETERS/SECOND'}

        multiplier_dic = {'HA':100.0, # To convert from m to cm
                          'UA':100.0, # m/s to cm/sec
                          'VA':-100.0} # MUX files have positive x in the
        # Southern direction.  This corrects for it, when writing nc files.

        self.quantity_multiplier = multiplier_dic[self.quantity_name]

        #self.file_name = file_name
        self.time_step_count = time_step_count
        self.time_step = time_step

        # NetCDF file definition
        self.outfile = NetCDFFile(file_name, 'w')
        outfile = self.outfile

        #Create new file
        nc_lon_lat_header(outfile, lon, lat)

        # TIME
        outfile.createDimension(time_name, None)
        outfile.createVariable(time_name, precision, (time_name,))

        #QUANTITY
        outfile.createVariable(self.quantity_name, precision,
                               (time_name, lat_name, lon_name))
        outfile.variables[self.quantity_name].missing_value = -1.e+034
        outfile.variables[self.quantity_name].units = \
                                 quantity_units[self.quantity_name]
        outfile.variables[lon_name][:] = ensure_numeric(lon)
        outfile.variables[lat_name][:] = ensure_numeric(lat)

        #Assume no one will be wanting to read this, while we are writing
        #outfile.close()

    def store_timestep(self, quantity_slice):
        """
        Write a time slice of quantity info
        quantity_slice is the data to be stored at this time step
        """

        outfile = self.outfile

        # Get the variables
        time = outfile.variables[time_name]
        quantity = outfile.variables[self.quantity_name]

        i = len(time)

        #Store time
        time[i] = i*self.time_step #self.domain.time
        quantity[i,:] = quantity_slice * self.quantity_multiplier

    def close(self):
        self.outfile.close()

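# Example (a minimal sketch; names and values are hypothetical, and
# ha_slice stands for one (lat, lon) array of wave heights in metres):
#
#     nc = Write_nc('HA', 'o_ha.nc', time_step_count=10, time_step=60.0,
#                   lon=[150.0, 150.1], lat=[-35.1, -35.0])
#     nc.store_timestep(ha_slice)  # stored in cm via the HA multiplier
#     nc.close()
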
def urs2sww(basename_in='o', basename_out=None, verbose=False,
            remove_nc_files=True,
            minlat=None, maxlat=None,
            minlon=None, maxlon=None,
            mint=None, maxt=None,
            mean_stage=0,
            origin=None,
            zscale=1,
            fail_on_NaN=True,
            NaN_filler=0,
            elevation=None):
    """
    Convert URS C binary format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename_waveheight-z-mux, basefilename_velocity-e-mux and
    basefilename_velocity-n-mux containing relative height,
    x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is for a grid.

    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
    NOTE: minlon is the most east boundary.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing)
    It will be the origin of the sww file. This shouldn't be used,
    since all of anuga should be able to handle an arbitrary origin.


    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    which means that latitude is the fastest
    varying dimension (row major order, so to speak)

    In URS C binary the latitudes and longitudes are in ascending order.
    """
    if basename_out == None:
        basename_out = basename_in
    files_out = urs2nc(basename_in, basename_out)
    ferret2sww(basename_out,
               minlat=minlat,
               maxlat=maxlat,
               minlon=minlon,
               maxlon=maxlon,
               mint=mint,
               maxt=maxt,
               mean_stage=mean_stage,
               origin=origin,
               zscale=zscale,
               fail_on_NaN=fail_on_NaN,
               NaN_filler=NaN_filler,
               inverted_bathymetry=True,
               verbose=verbose)
    #print "files_out", files_out
    if remove_nc_files:
        for file_out in files_out:
            os.remove(file_out)

def urs2nc(basename_in='o', basename_out='urs'):
    """
    Convert the 3 urs files to 4 nc files.

    The urs file names must be:
    [basename_in]_waveheight-z-mux
    [basename_in]_velocity-e-mux
    [basename_in]_velocity-n-mux

    """

    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
                basename_in + EAST_VELOCITY_LABEL,
                basename_in + NORTH_VELOCITY_LABEL]
    files_out = [basename_out+'_ha.nc',
                 basename_out+'_ua.nc',
                 basename_out+'_va.nc']
    quantities = ['HA','UA','VA']

    for file_name in files_in:
        if os.access(file_name, os.F_OK) == 0:
            msg = 'File %s does not exist or is not accessible' %file_name
            raise IOError, msg

    hashed_elevation = None
    for file_in, file_out, quantity in map(None, files_in,
                                           files_out,
                                           quantities):
        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
                                                  file_out,
                                                  quantity)
        #print "lonlatdep", lonlatdep
        if hashed_elevation == None:
            elevation_file = basename_out+'_e.nc'
            write_elevation_nc(elevation_file,
                               lon,
                               lat,
                               depth)
            hashed_elevation = myhash(lonlatdep)
        else:
            msg = "The elevation information in the mux files is inconsistent"
            assert hashed_elevation == myhash(lonlatdep), msg
    files_out.append(elevation_file)
    return files_out

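# Example (a minimal sketch; the three mux files must exist alongside 'o'):
#
#     files_out = urs2nc(basename_in='o', basename_out='urs')
#     # -> ['urs_ha.nc', 'urs_ua.nc', 'urs_va.nc', 'urs_e.nc']
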
def _binary_c2nc(file_in, file_out, quantity):
    """
    Reads in a quantity urs file and writes a quantity nc file.
    Additionally, returns the depth and lat, long info,
    so it can be written to a file.
    """
    columns = 3 # long, lat, depth
    mux_file = open(file_in, 'rb')

    # Number of points/stations
    (points_num,) = unpack('i', mux_file.read(4))

    # nt, int - Number of time steps
    (time_step_count,) = unpack('i', mux_file.read(4))

    # dt, float - time step, seconds
    (time_step,) = unpack('f', mux_file.read(4))

    msg = "Bad data in the mux file."
    if points_num < 0:
        mux_file.close()
        raise ANUGAError, msg
    if time_step_count < 0:
        mux_file.close()
        raise ANUGAError, msg
    if time_step < 0:
        mux_file.close()
        raise ANUGAError, msg

    lonlatdep = p_array.array('f')
    lonlatdep.read(mux_file, columns * points_num)
    lonlatdep = array(lonlatdep, typecode=Float)
    lonlatdep = reshape(lonlatdep, (points_num, columns))

    lon, lat, depth = lon_lat2grid(lonlatdep)
    lon_sorted = list(lon)
    lon_sorted.sort()

    if not lon == lon_sorted:
        msg = "Longitudes in mux file are not in ascending order"
        raise IOError, msg
    lat_sorted = list(lat)
    lat_sorted.sort()

    if not lat == lat_sorted:
        msg = "Latitudes in mux file are not in ascending order"
        raise IOError, msg

    nc_file = Write_nc(quantity,
                       file_out,
                       time_step_count,
                       time_step,
                       lon,
                       lat)

    for i in range(time_step_count):
        #Read in a time slice from mux file
        hz_p_array = p_array.array('f')
        hz_p_array.read(mux_file, points_num)
        hz_p = array(hz_p_array, typecode=Float)
        hz_p = reshape(hz_p, (len(lon), len(lat)))
        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.

        #write time slice to nc file
        nc_file.store_timestep(hz_p)
    mux_file.close()
    nc_file.close()

    return lonlatdep, lon, lat, depth


def write_elevation_nc(file_out, lon, lat, depth_vector):
    """
    Write an nc elevation file.
    """

    # NetCDF file definition
    outfile = NetCDFFile(file_out, 'w')

    #Create new file
    nc_lon_lat_header(outfile, lon, lat)

    # ELEVATION
    zname = 'ELEVATION'
    outfile.createVariable(zname, precision, (lat_name, lon_name))
    outfile.variables[zname].units = 'CENTIMETERS'
    outfile.variables[zname].missing_value = -1.e+034

    outfile.variables[lon_name][:] = ensure_numeric(lon)
    outfile.variables[lat_name][:] = ensure_numeric(lat)

    depth = reshape(depth_vector, (len(lat), len(lon)))
    outfile.variables[zname][:] = depth

    outfile.close()

def nc_lon_lat_header(outfile, lon, lat):
    """
    outfile is the netcdf file handle.
    lon - a list/array of the longitudes
    lat - a list/array of the latitudes
    """

    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from URS binary C'

    # Longitude
    outfile.createDimension(lon_name, len(lon))
    outfile.createVariable(lon_name, precision, (lon_name,))
    outfile.variables[lon_name].point_spacing = 'uneven'
    outfile.variables[lon_name].units = 'degrees_east'
    outfile.variables[lon_name].assignValue(lon)


    # Latitude
    outfile.createDimension(lat_name, len(lat))
    outfile.createVariable(lat_name, precision, (lat_name,))
    outfile.variables[lat_name].point_spacing = 'uneven'
    outfile.variables[lat_name].units = 'degrees_north'
    outfile.variables[lat_name].assignValue(lat)


def lon_lat2grid(long_lat_dep):
    """
    Given a list of points that are assumed to be on a grid,
    return the long's and lat's of the grid.
    long_lat_dep is an array where each row is a position.
    The first column is longitudes.
    The second column is latitudes.

    The latitude is the fastest varying dimension - in mux files
    """
    LONG = 0
    LAT = 1
    QUANTITY = 2

    long_lat_dep = ensure_numeric(long_lat_dep, Float)

    num_points = long_lat_dep.shape[0]
    this_rows_long = long_lat_dep[0,LONG]

    # Count the length of unique latitudes
    # (check the index bound before reading, to avoid running off the end)
    i = 0
    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
        i += 1
    # determine the lats and longs from the grid
    lat = long_lat_dep[:i, LAT]
    long = long_lat_dep[::i, LONG]

    lenlong = len(long)
    lenlat = len(lat)
    #print 'len lat', lat, len(lat)
    #print 'len long', long, len(long)

    msg = 'Input data is not gridded'
    assert num_points % lenlat == 0, msg
    assert num_points % lenlong == 0, msg

    # Test that data is gridded
    for i in range(lenlong):
        msg = 'Data is not gridded.  It must be for this operation'
        first = i*lenlat
        last = first + lenlat

        assert allclose(long_lat_dep[first:last,LAT], lat), msg
        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg


#    print 'range long', min(long), max(long)
#    print 'range lat', min(lat), max(lat)
#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])



    msg = 'Out of range latitudes/longitudes'
    for l in lat: assert -90 < l < 90, msg
    for l in long: assert -180 < l < 180, msg

    # Changing quantity from lat being the fastest varying dimension to
    # long being the fastest varying dimension
    # FIXME - make this faster/do this a better way
    # use numeric transpose, after reshaping the quantity vector
#    quantity = zeros(len(long_lat_dep), Float)
    quantity = zeros(num_points, Float)

#    print 'num', num_points
    for lat_i, _ in enumerate(lat):
        for long_i, _ in enumerate(long):
            q_index = lat_i*lenlong+long_i
            lld_index = long_i*lenlat+lat_i
#            print 'lat_i', lat_i, 'long_i', long_i, 'q_index', q_index, 'lld_index', lld_index
            temp = long_lat_dep[lld_index, QUANTITY]
            quantity[q_index] = temp

    return long, lat, quantity

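# Example (a minimal sketch): a 2x2 grid with latitude varying fastest;
# each row of the input is (long, lat, depth).
#
#     lld = [[114.0, -21.0, 10.0],
#            [114.0, -20.0, 12.0],
#            [115.0, -21.0, 11.0],
#            [115.0, -20.0, 13.0]]
#     long, lat, quantity = lon_lat2grid(lld)
#     # long == [114.0, 115.0], lat == [-21.0, -20.0]
#     # quantity is reordered so longitude varies fastest:
#     # [10.0, 11.0, 12.0, 13.0]
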
    ####  END URS 2 SWW  ###

    #### URS UNGRIDDED 2 SWW ###

    ### PRODUCING THE POINTS NEEDED FILE ###
LL_LAT = -50.0
LL_LONG = 80.0
GRID_SPACING = 1.0/60.0
LAT_AMOUNT = 4800
LONG_AMOUNT = 3600
def URS_points_needed_to_file(file_name, boundary_polygon, zone,
                              ll_lat=LL_LAT, ll_long=LL_LONG,
                              grid_spacing=GRID_SPACING,
                              lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT,
                              export_csv=False, use_cache=False,
                              verbose=False):
    """
    file_name - name of the urs file produced for David.
    boundary_polygon - a list of points that describes a polygon.
                       The last point is assumed to join the first point.
                       This is in UTM (lat long would be better though)

    ll_lat - lower left latitude, in decimal degrees
    ll_long - lower left longitude, in decimal degrees
    grid_spacing - in decimal degrees


    Don't add the file extension.  It will be added.
    """
    geo = URS_points_needed(boundary_polygon, zone, ll_lat, ll_long,
                            grid_spacing,
                            lat_amount, long_amount, use_cache, verbose)
    if not file_name[-4:] == ".urs":
        file_name += ".urs"
    geo.export_points_file(file_name)
    if export_csv:
        if file_name[-4:] == ".urs":
            file_name = file_name[:-4] + ".csv"
        geo.export_points_file(file_name)

def URS_points_needed(boundary_polygon, zone, ll_lat=LL_LAT,
                      ll_long=LL_LONG, grid_spacing=GRID_SPACING,
                      lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT,
                      use_cache=False, verbose=False):
    args = (boundary_polygon,
            zone)
    kwargs = {'ll_lat': ll_lat,
              'll_long': ll_long,
              'grid_spacing': grid_spacing,
              'lat_amount': lat_amount,
              'long_amount': long_amount}
    if use_cache is True:
        try:
            from anuga.caching import cache
        except:
            msg = 'Caching was requested, but caching module '+\
                  'could not be imported'
            raise Exception, msg


        geo = cache(_URS_points_needed,
                    args, kwargs,
                    verbose=verbose,
                    compression=False)
    else:
        #I was getting 'got multiple values for keyword argument' errors
        #geo = apply(_URS_points_needed, args, kwargs)
        geo = _URS_points_needed(boundary_polygon,
                                 zone, ll_lat,
                                 ll_long, grid_spacing,
                                 lat_amount, long_amount)

    return geo

def _URS_points_needed(boundary_polygon,
                       zone, ll_lat=LL_LAT,
                       ll_long=LL_LONG, grid_spacing=GRID_SPACING,
                       lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT):
    """
    boundary_polygon - a list of points that describes a polygon.
                       The last point is assumed to join the first point.
                       This is in UTM (lat long would be better though)

    ll_lat - lower left latitude, in decimal degrees
    ll_long - lower left longitude, in decimal degrees
    grid_spacing - in decimal degrees

    """

    from sets import ImmutableSet

    msg = "grid_spacing can not be zero"
    assert not grid_spacing == 0, msg
    a = boundary_polygon
    # List of segments.  Each segment is two points.
    segs = [i and [a[i-1], a[i]] or [a[len(a)-1], a[0]] for i in range(len(a))]

    # convert the segs to Lat's and longs.

    # Don't assume the zone of the segments is the same as the lower left
    # corner of the lat long data!!  They can easily be in different zones

    lat_long_set = ImmutableSet()
    for seg in segs:
        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing,
                                        lat_amount, long_amount, zone)
        lat_long_set |= ImmutableSet(points_lat_long)
    #print "lat_long_set", lat_long_set
    geo = Geospatial_data(data_points=list(lat_long_set),
                          points_are_lats_longs=True)
    return geo

def points_needed(seg, ll_lat, ll_long, grid_spacing,
                  lat_amount, long_amount, zone):
    """
    Return a list of the points, in lats and longs, that are needed to
    interpolate any point on the segment.
    """
    from math import sqrt
    #print "zone", zone
    geo_reference = Geo_reference(zone=zone)
    #print "seg", seg
    geo = Geospatial_data(seg, geo_reference=geo_reference)
    seg_lat_long = geo.get_data_points(as_lat_long=True)
    #print "seg_lat_long", seg_lat_long
    # 1.415 = 2^0.5, rounded up....
    sqrt_2_rounded_up = 1.415
    buffer = sqrt_2_rounded_up * grid_spacing

    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer

    first_row = (min_long - ll_long)/grid_spacing
    # To round up
    first_row_long = int(round(first_row + 0.5))
    #print "first_row", first_row_long

    last_row = (max_long - ll_long)/grid_spacing # round down
    last_row_long = int(round(last_row))
    #print "last_row", last_row_long

    first_row = (min_lat - ll_lat)/grid_spacing
    # To round up
    first_row_lat = int(round(first_row + 0.5))
    #print "first_row", first_row_lat

    last_row = (max_lat - ll_lat)/grid_spacing # round down
    last_row_lat = int(round(last_row))
    #print "last_row", last_row_lat

    # to work out the max distance -
    # 111120 m - the distance spanned by 1 degree of latitude.
    #max_distance = sqrt_2_rounded_up * 111120 * grid_spacing
    max_distance = 157147.4112 * grid_spacing
    #print "max_distance", max_distance #2619.12 m for 1 minute
    points_lat_long = []
    # Create a list of the lat long points to include,
    # keeping only those close enough to the segment.
    for index_lat in range(first_row_lat, last_row_lat + 1):
        for index_long in range(first_row_long, last_row_long + 1):
            lat = ll_lat + index_lat*grid_spacing
            long = ll_long + index_long*grid_spacing

            #filter here to keep good points
            if keep_point(lat, long, seg, max_distance):
                points_lat_long.append((lat, long)) #must be hashable
    #print "points_lat_long", points_lat_long

    return points_lat_long

def keep_point(lat, long, seg, max_distance):
    """
    Return True if the point (lat, long) is within max_distance of the
    line through seg (two points, in UTM).
    """
    from math import sqrt
    _, x0, y0 = redfearn(lat, long)
    x1 = seg[0][0]
    y1 = seg[0][1]
    x2 = seg[1][0]
    y2 = seg[1][1]

    x2_1 = x2-x1
    y2_1 = y2-y1
    d = abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1))/sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1))
    if d <= max_distance:
        return True
    else:
        return False

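# The test above uses the standard perpendicular distance from the point
# (x0, y0) to the infinite line through (x1, y1) and (x2, y2):
#
#     d = |(x2-x1)(y1-y0) - (x1-x0)(y2-y1)| / sqrt((x2-x1)^2 + (y2-y1)^2)
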
    #### CONVERTING UNGRIDDED URS DATA TO AN SWW FILE ####

WAVEHEIGHT_MUX_LABEL = '_waveheight-z-mux'
EAST_VELOCITY_LABEL = '_velocity-e-mux'
NORTH_VELOCITY_LABEL = '_velocity-n-mux'
4571def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
4572                      mint=None, maxt=None,
4573                      mean_stage=0,
4574                      origin=None,
4575                      hole_points_UTM=None,
4576                      zscale=1):
4577    """   
4578    Convert URS C binary format for wave propagation to
4579    sww format native to abstract_2d_finite_volumes.
4580
4581
4582    Specify only basename_in and read files of the form
4583    basefilename_velocity-z-mux, basefilename_velocity-e-mux and
4584    basefilename_waveheight-n-mux containing relative height,
4585    x-velocity and y-velocity, respectively.
4586
4587    Also convert latitude and longitude to UTM. All coordinates are
4588    assumed to be given in the GDA94 datum. The latitude and longitude
4589    information is assumed ungridded grid.
4590
4591    min's and max's: If omitted - full extend is used.
4592    To include a value min ans max may equal it.
4593    Lat and lon are assumed to be in decimal degrees.
4594   
4595    origin is a 3-tuple with geo referenced
4596    UTM coordinates (zone, easting, northing)
4597    It will be the origin of the sww file. This shouldn't be used,
4598    since all of anuga should be able to handle an arbitary origin.
4599    The mux point info is NOT relative to this origin.
4600
4601
4602    URS C binary format has data orgainised as TIME, LONGITUDE, LATITUDE
4603    which means that latitude is the fastest
4604    varying dimension (row major order, so to speak)
4605
4606    In URS C binary the latitudes and longitudes are in assending order.
4607
4608    Note, interpolations of the resulting sww file will be different
4609    from results of urs2sww.  This is due to the interpolation
4610    function used, and the different grid structure between urs2sww
4611    and this function.
4612   
4613    Interpolating data that has an underlying gridded source can
4614    easily end up with different values, depending on the underlying
4615    mesh.
4616
4617    consider these 4 points
4618    50  -50
4619
4620    0     0
4621
4622    The grid can be
4623     -
4624    |\|    A
4625     -
4626     or;
4627      -
4628     |/|   B
4629      -
4630      If a point is just below the center of the midpoint, it will have a
4631      +ve value in grid A and a -ve value in grid B.
4632    """ 
4633    from anuga.pmesh.mesh import Mesh, NoTrianglesError
4634
4635    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4636                basename_in + EAST_VELOCITY_LABEL,
4637                basename_in + NORTH_VELOCITY_LABEL]
4638    quantities = ['HA','UA','VA']
4639
4640    # instanciate urs_points of the three mux files.
4641    mux = {}
4642    for quantity, file in map(None, quantities, files_in):
4643        mux[quantity] = Urs_points(file)
4644       
4645    # Could check that the depth is the same. (hashing)
4646
4647    # handle to a mux file to do depth stuff
4648    a_mux = mux[quantities[0]]
4649   
4650    # Convert to utm
4651    lat = a_mux.lonlatdep[:,1]
4652    long = a_mux.lonlatdep[:,0]
4653    points_utm, zone = convert_from_latlon_to_utm( \
4654        latitudes=lat, longitudes=long)
4655    #print "points_utm", points_utm
4656    #print "zone", zone
4657
4658    elevation = a_mux.lonlatdep[:,2] * -1 #
4659   
4660    # grid ( create a mesh from the selected points)
4661    # This mesh has a problem.  Triangles are streched over ungridded areas.
4662    #  If these areas could be described as holes in pmesh, that would be great
4663
4664    # I can't just get the user to selection a point in the middle.
4665    # A boundary is needed around these points.
4666    # But if the zone of points is obvious enough auto-segment should do
4667    # a good boundary.
4668    mesh = Mesh()
4669    mesh.add_vertices(points_utm)
4670    mesh.auto_segment(smooth_indents=True, expand_pinch=True)
4671    # To try and avoid alpha shape 'hugging' too much
4672    mesh.auto_segment( mesh.shape.get_alpha()*1.1 )
4673    if hole_points_UTM is not None:
4674        point = ensure_absolute(hole_points_UTM)
4675        mesh.add_hole(point[0], point[1])
4676    try:
4677        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4678    except NoTrianglesError:
4679        # This is a bit of a hack, going in and changing the
4680        # data structure.
4681        mesh.holes = []
4682        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4683    mesh_dic = mesh.Mesh2MeshList()
4684
4685    #mesh.export_mesh_file(basename_in + '_168.tsh')
4686    #import sys; sys.exit()
4687    # These are the times of the mux file
4688    mux_times = []
4689    for i in range(a_mux.time_step_count):
4690        mux_times.append(a_mux.time_step * i) 
4691    mux_times_start_i, mux_times_fin_i = mux2sww_time(mux_times, mint, maxt)
4692    times = mux_times[mux_times_start_i:mux_times_fin_i]
4693   
4694    if mux_times_start_i == mux_times_fin_i:
4695        # Close the mux files
4696        for quantity, file in map(None, quantities, files_in):
4697            mux[quantity].close()
4698        msg="Due to mint and maxt there's no time info in the boundary SWW."
4699        raise Exception, msg
4700       
4701    # If this raise is removed there are currently no downstream errors
4702           
4703    points_utm=ensure_numeric(points_utm)
4704    #print "mesh_dic['generatedpointlist']", mesh_dic['generatedpointlist']
4705    #print "points_utm", points_utm
4706    assert ensure_numeric(mesh_dic['generatedpointlist']) == \
4707           ensure_numeric(points_utm)
4708   
4709    volumes = mesh_dic['generatedtrianglelist']
4710   
4711    # write sww intro and grid stuff.   
4712    if basename_out is None:
4713        swwname = basename_in + '.sww'
4714    else:
4715        swwname = basename_out + '.sww'
4716
4717    if verbose: print 'Output to ', swwname
4718    outfile = NetCDFFile(swwname, 'w')
4719    # For a different way of doing this, check out tsh2sww
4720    # work out sww_times and the index range this covers
4721    sww = Write_sww()
4722    sww.header(outfile, times, len(volumes), len(points_utm),
4723                         verbose=verbose)
4724    outfile.mean_stage = mean_stage
4725    outfile.zscale = zscale
4726
4727    sww.triangulation(outfile, points_utm, volumes,
4728                            elevation, zone,  new_origin=origin,
4729                            verbose=verbose)
4730   
4731    if verbose: print 'Converting quantities'
4732    j = 0
4733    # Read in a time slice from each mux file and write it to the sww file
4734    for ha, ua, va in map(None, mux['HA'], mux['UA'], mux['VA']):
4735        if j >= mux_times_start_i and j < mux_times_fin_i:
4736            stage = zscale*ha + mean_stage
4737            h = stage - elevation
4738            xmomentum = ua*h
4739            ymomentum = -1*va*h # -1 since in mux files south is positive.
4740            sww.quantities(outfile, 
4741                                     slice_index=j - mux_times_start_i,
4742                                     verbose=verbose,
4743                                     stage=stage,
4744                                     xmomentum=xmomentum,
4745                                     ymomentum=ymomentum)
4746        j += 1
4747    if verbose: sww.verbose_quantities(outfile)
4748    outfile.close()
4749    #
4750    # Do some conversions while writing the sww file
4751
4752   
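# The docstring above notes that grids A and B can give opposite signs for
# the same point.  The sketch below (not part of the original module; the
# helper name is hypothetical) works that example through with plain linear
# interpolation on the unit square with corner values 50 (top-left),
# -50 (top-right) and 0, 0 (bottom row).
def _demo_grid_dependence():
    x, y = 0.5, 0.49  # a point just below the centre of the square

    # Grid A (diagonal from top-left to bottom-right): the point falls in
    # the triangle (0,0), (1,0), (0,1) with values 0, 0, 50, where the
    # linear interpolant is 50*y.
    value_A = 50*y    # +24.5

    # Grid B (diagonal from bottom-left to top-right): the point falls in
    # the triangle (0,0), (1,0), (1,1) with values 0, 0, -50, where the
    # linear interpolant is -50*y.
    value_B = -50*y   # -24.5

    print 'grid A: %.1f, grid B: %.1f' %(value_A, value_B)
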
4753def mux2sww_time(mux_times, mint, maxt):
4754    """
4755    """
4756
4757    if mint == None:
4758        mux_times_start_i = 0
4759    else:
4760        mux_times_start_i = searchsorted(mux_times, mint)
4761       
4762    if maxt == None:
4763        mux_times_fin_i = len(mux_times)
4764    else:
4765        maxt += 0.5 # so if you specify a time where there is
4766                    # data that time will be included
4767        mux_times_fin_i = searchsorted(mux_times, maxt)
4768
4769    return mux_times_start_i, mux_times_fin_i
4770
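# Worked example (not part of the original module; the helper name is
# hypothetical) of how mux2sww_time selects the window: searchsorted maps
# mint to the first index at or after it, and the +0.5 nudge on maxt keeps
# a timestep that coincides exactly with maxt.
def _demo_mux2sww_time():
    start_i, fin_i = mux2sww_time([0.0, 10.0, 20.0, 30.0], 5, 20)
    assert (start_i, fin_i) == (1, 3)  # mux_times[1:3] == [10.0, 20.0]
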
4771
4772class Write_sww:
4773    from anuga.shallow_water.shallow_water_domain import Domain
4774    sww_quantities = Domain.conserved_quantities
4775    RANGE = '_range'
4776    def __init__(self):
4777        pass
4778   
4779    def header(self,outfile, times, number_of_volumes,
4780                         number_of_points,
4781                         description='Converted from XXX',
4782                         smoothing=True,
4783                         order=1, verbose=False):
4784        """
4785        outfile - handle to the NetCDF file that will be written to
4786        times - A list of the time slice times OR a start time
4787        Note, if a list is given the info will be made relative.
4788        number_of_volumes - the number of triangles
4789        """
4790   
4791        outfile.institution = 'Geoscience Australia'
4792        outfile.description = description
4793
4794        #For sww compatibility
4795        if smoothing is True:
4796            # Smoothing to be deprecated
4797            outfile.smoothing = 'Yes'
4798            outfile.vertices_are_stored_uniquely = 'False'
4799        else:
4800            # Smoothing to be deprecated
4801            outfile.smoothing = 'No'
4802            outfile.vertices_are_stored_uniquely = 'True'
4803        outfile.order = order
4804
4805        try:
4806            revision_number = get_revision_number()
4807        except:
4808            revision_number = None
4809        # writing a string so None can be written   
4810        outfile.revision_number = str(revision_number)
4811        #times - A list or array of the time slice times OR a start time
4812        #times = ensure_numeric(times)
4813        #Start time in seconds since the epoch (midnight 1/1/1970)
4814
4815        # This is used to separate a single start time from a list of times.
4816        # What it actually does is distinguish lists/arrays from scalars.
4817        if type(times) is list or type(times) is ArrayType: 
4818            number_of_times = len(times)
4819            times = ensure_numeric(times) 
4820            if number_of_times == 0:
4821                starttime = 0
4822            else:
4823                starttime = times[0]
4824                times = times - starttime  #Store relative times
4825        else:
4826            number_of_times = 0
4827            starttime = times
4828            #times = ensure_numeric([])
4829        outfile.starttime = starttime
4830        # dimension definitions
4831        outfile.createDimension('number_of_volumes', number_of_volumes)
4832        outfile.createDimension('number_of_vertices', 3)
4833        outfile.createDimension('numbers_in_range', 2)
4834   
4835        if smoothing is True:
4836            outfile.createDimension('number_of_points', number_of_points)
4837       
4838            # FIXME(Ole): This will cause sww files for parallel domains to
4839            # have ghost nodes stored (but not used by triangles).
4840            # To clean this up, we have to change get_vertex_values and
4841            # friends in quantity.py (but I can't be bothered right now)
4842        else:
4843            outfile.createDimension('number_of_points', 3*number_of_volumes)
4844        outfile.createDimension('number_of_timesteps', number_of_times)
4845
4846        # variable definitions
4847        outfile.createVariable('x', precision, ('number_of_points',))
4848        outfile.createVariable('y', precision, ('number_of_points',))
4849        outfile.createVariable('elevation', precision, ('number_of_points',))
4850        q = 'elevation'
4851        outfile.createVariable(q+Write_sww.RANGE, precision,
4852                               ('numbers_in_range',))
4853        # The values are initially filled with large (10e+36) numbers.
4854        # I'm relying on this feature.  Maybe I shouldn't?
4855        outfile.variables[q+Write_sww.RANGE][1] = \
4856                          -1*outfile.variables[q+Write_sww.RANGE][1]
4857
4858        #FIXME: Backwards compatibility
4859        outfile.createVariable('z', precision, ('number_of_points',))
4860        #################################
4861
4862        outfile.createVariable('volumes', Int, ('number_of_volumes',
4863                                                'number_of_vertices'))
4864       
4865        outfile.createVariable('time', precision,
4866                               ('number_of_timesteps',))
4867       
4868        for q in Write_sww.sww_quantities:
4869            outfile.createVariable(q, precision,
4870                                   ('number_of_timesteps',
4871                                    'number_of_points')) 
4872            outfile.createVariable(q+Write_sww.RANGE, precision,
4873                                   ('numbers_in_range',))
4874            # Initialising the max value to a very small number.
4875            # It assumes that netcdf initialises it to a very large number
4876            outfile.variables[q+Write_sww.RANGE][1] = \
4877                -outfile.variables[q+Write_sww.RANGE][1]
4878           
4879        if type(times) is list or type(times) is ArrayType: 
4880            outfile.variables['time'][:] = times    #Store time relative
4881           
4882        if verbose:
4883            print '------------------------------------------------'
4884            print 'Statistics:'
4885            print '    t in [%f, %f], len(t) == %d'\
4886                  %(min(times.flat), max(times.flat), len(times.flat))
4887
4888       
4889    def triangulation(self,outfile, points_utm, volumes,
4890                                elevation, zone=None, new_origin=None, 
4891                                points_georeference=None, verbose=False):
4892        """
4893       
4894        new_origin - a georeference that the points can be set to. (Maybe
4895        do this before calling this function.)
4896       
4897        points_utm - currently a list or array of the points in UTM.
4898        points_georeference - the georeference of the points_utm
4899       
4900        How about passing new_origin and current_origin.
4901        If you get both, do a conversion from the old to the new.
4902       
4903        If you only get new_origin, the points are absolute, convert to relative
4904       
4905        if you only get the current_origin the points are relative, store
4906        as relative.
4907       
4908        if you get no georefs create a new georef based on the minimums of
4909        points_utm.  (Another option would be to default to absolute)
4910       
4911        Yes, and this is done in another part of the code.
4912        Probably geospatial.
4913       
4914        If you don't supply either geo_ref, then supply a zone. If not,
4915        the default zone will be used.
4916       
4917       
4918        Precondition:
4919
4920        header has been called.
4921        """
4922       
4923        number_of_points = len(points_utm)   
4924        volumes = array(volumes)
4925
4926        # given the two geo_refs and the points, do the stuff
4927        # described in the method header
4928        # if this is needed else where, pull out as a function
4929        points_georeference = ensure_geo_reference(points_georeference)
4930        new_origin = ensure_geo_reference(new_origin)
4931        if new_origin is None and points_georeference is not None:
4932            points = points_utm
4933            geo_ref = points_georeference
4934        else:
4935            if new_origin is None:
4936                new_origin = Geo_reference(zone,min(points_utm[:,0]),
4937                                           min(points_utm[:,1]))
4938            points = new_origin.change_points_geo_ref(points_utm,
4939                                                      points_georeference)
4940            geo_ref = new_origin
4941
4942        # At this stage I need a georef and points
4943        # the points are relative to the georef
4944        geo_ref.write_NetCDF(outfile)
4945   
4946        # This will put the geo ref in the middle
4947        #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
4948        x =  points[:,0]
4949        y =  points[:,1]
4950        z = outfile.variables['z'][:]
4951   
4952        if verbose:
4953            print '------------------------------------------------'
4954            print 'More Statistics:'
4955            print '  Extent (x/y):'
4956            print '    x in [%f, %f], len(x) == %d'\
4957                  %(min(x), max(x),
4958                    len(x))
4959            print '    y in [%f, %f], len(y) == %d'\
4960                  %(min(y), max(y),
4961                    len(y))
4962            print '    z in [%f, %f], len(z) == %d'\
4963                  %(min(elevation), max(elevation),
4964                    len(elevation))
4965            print 'geo_ref: ',geo_ref
4966            print '------------------------------------------------'
4967           
4968        #z = resize(bath_grid,outfile.variables['z'][:].shape)
4969        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
4970        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
4971        outfile.variables['z'][:] = elevation
4972        outfile.variables['elevation'][:] = elevation  #FIXME HACK
4973        outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
4974
4975        q = 'elevation'
4976        # This updates the _range values
4977        outfile.variables[q+Write_sww.RANGE][0] = min(elevation)
4978        outfile.variables[q+Write_sww.RANGE][1] = max(elevation)
4979
4980    def quantities(self, outfile, precision=Float,
4981                             slice_index=None, time=None,
4982                             verbose=False, **quant):
4983        """
4984        Write the quantity info.
4985
4986        **quant is extra keyword arguments passed in. These must be
4987          the sww quantities, currently: stage, xmomentum, ymomentum.
4988
4989        if the time array has already been built, use the slice_index
4990        to specify the index.
4991       
4992        Otherwise, use time to increase the time dimension
4993
4994        Maybe make this general, but the viewer assumes these quantities,
4995        so maybe we don't want it general - unless the viewer is general
4996       
4997        Precondition:
4998        triangulation and
4999        header have been called.
5000        """
5001
5002        if time is not None:
5003            file_time = outfile.variables['time']
5004            slice_index = len(file_time)
5005            file_time[slice_index] = time   
5006
5007        # Write the conserved quantities from Domain.
5008        # Typically stage,  xmomentum, ymomentum
5009        # other quantities will be ignored, silently.
5010        for q in Write_sww.sww_quantities:
5011            if not quant.has_key(q):
5012                msg = 'SWW file can not write quantity %s' %q
5013                raise NewQuantity, msg
5014            else:
5015                q_values = quant[q]
5016                outfile.variables[q][slice_index] = q_values.astype(precision)
5017
5018                # This updates the _range values
5019                q_range = outfile.variables[q+Write_sww.RANGE][:]
5020                q_values_min = min(q_values)
5021                if q_values_min < q_range[0]:
5022                    outfile.variables[q+Write_sww.RANGE][0] = q_values_min
5023                q_values_max = max(q_values)
5024                if q_values_max > q_range[1]:
5025                    outfile.variables[q+Write_sww.RANGE][1] = q_values_max
5026
5027    def verbose_quantities(self, outfile):
5028        print '------------------------------------------------'
5029        print 'More Statistics:'
5030        for q in Write_sww.sww_quantities:
5031            print '%s in [%f, %f]' %(q
5032                                       ,outfile.variables[q+Write_sww.RANGE][0]
5033                                       ,outfile.variables[q+Write_sww.RANGE][1]
5034                                       )
5035        print '------------------------------------------------'
5036       
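# Sketch (not part of the original module; the helper name and its argument
# names are placeholders) of how the three Write_sww methods fit together,
# mirroring the header/triangulation/quantities calls made earlier in this
# file.
def _demo_write_sww(swwname, times, points, volumes, elev, zone,
                    stage_slices, xmom_slices, ymom_slices):
    outfile = NetCDFFile(swwname, 'w')
    sww = Write_sww()
    sww.header(outfile, times, len(volumes), len(points))
    sww.triangulation(outfile, points, volumes, elev, zone=zone)
    for j in range(len(stage_slices)):
        sww.quantities(outfile, slice_index=j,
                       stage=stage_slices[j],
                       xmomentum=xmom_slices[j],
                       ymomentum=ymom_slices[j])
    outfile.close()
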
5037def obsolete_write_sww_time_slices(outfile, has, uas, vas, elevation,
5038                         mean_stage=0, zscale=1,
5039                         verbose=False):   
5040    #Time stepping
5041    stage = outfile.variables['stage']
5042    xmomentum = outfile.variables['xmomentum']
5043    ymomentum = outfile.variables['ymomentum']
5044
5045    n = len(has)
5046    j=0
5047    for ha, ua, va in map(None, has, uas, vas):
5048        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
5049        w = zscale*ha + mean_stage
5050        stage[j] = w
5051        h = w - elevation
5052        xmomentum[j] = ua*h
5053        ymomentum[j] = -1*va*h  # -1 since in mux files south is positive.
5054        j += 1
5055   
5056def urs2txt(basename_in, location_index=None):
5057    """
5058    Not finished or tested
5059    """
5060   
5061    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
5062                basename_in + EAST_VELOCITY_LABEL,
5063                basename_in + NORTH_VELOCITY_LABEL]
5064    quantities = ['HA','UA','VA']
5065
5066    d = ","
5067   
5068    # Instantiate Urs_points for each of the three mux files.
5069    mux = {}
5070    for quantity, file in map(None, quantities, files_in):
5071        mux[quantity] = Urs_points(file)
5072       
5073    # Could check that the depth is the same. (hashing)
5074
5075    # handle to a mux file to do depth stuff
5076    a_mux = mux[quantities[0]]
5077   
5078    # Convert to utm
5079    latitudes = a_mux.lonlatdep[:,1]
5080    longitudes = a_mux.lonlatdep[:,0]
5081    points_utm, zone = convert_from_latlon_to_utm( \
5082        latitudes=latitudes, longitudes=longitudes)
5083    #print "points_utm", points_utm
5084    #print "zone", zone
5085    depths = a_mux.lonlatdep[:,2]
5086   
5087    fid = open(basename_in + '.txt', 'w')
5088
5089    fid.write("zone: " + str(zone) + "\n")
5090
5091    if location_index is not None:
5092        #Title
5093        li = location_index
5094        fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
5095                  'Northing' + "\n")
5096        fid.write(str(li) +d+ str(latitudes[li])+d+ \
5097              str(longitudes[li]) +d+ str(points_utm[li][0]) +d+ \
5098              str(points_utm[li][1]) + "\n")
5099
5100    # the non-time dependent stuff
5101    #Title
5102    fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
5103                  'Northing' +d+ 'depth m' + "\n")
5104    i = 0
5105    for depth, point_utm, lat, long in map(None, depths,
5106                                               points_utm, latitudes,
5107                                               longitudes):
5108       
5109        fid.write(str(i) +d+ str(lat)+d+ str(long) +d+ str(point_utm[0]) +d+ \
5110                  str(point_utm[1]) +d+ str(depth) + "\n")
5111        i +=1
5112    #Time dependent
5113    if location_index is not None:
5114        time_step = a_mux.time_step
5115        i = 0
5116        #Title
5117        fid.write('time' +d+ 'HA depth m'+d+ \
5118                      'UA momentum East x m/sec' +d+ 'VA momentum North y m/sec' \
5119                      + "\n")
5120        for HA, UA, VA in map(None, mux['HA'], mux['UA'], mux['VA']):
5121            fid.write(str(i*time_step) +d+ str(HA[location_index])+d+ \
5122                      str(UA[location_index]) +d+ str(VA[location_index]) \
5123                      + "\n")
5124           
5125            i +=1
5126   
5127class Urs_points:
5128    """
5129    Read the info in URS mux files.
5130
5131    For the quantities, here's how the file names correspond to
5132    what they contain;
5133    z-mux is height above sea level, m
5134    e-mux is velocity in the easterly direction, m/s
5135    n-mux is velocity in the northerly direction, m/s
5136    """
5137    def __init__(self,urs_file):
5138        self.iterated = False
5139        columns = 3 # long, lat , depth
5140        mux_file = open(urs_file, 'rb')
5141       
5142        # Number of points/stations
5143        (self.points_num,)= unpack('i',mux_file.read(4))
5144       
5145        # nt, int - Number of time steps
5146        (self.time_step_count,)= unpack('i',mux_file.read(4))
5147        #print "self.time_step_count", self.time_step_count
5148        #dt, float - time step, seconds
5149        (self.time_step,) = unpack('f', mux_file.read(4))
5150        #print "self.time_step", self.time_step
5151        msg = "Bad data in the urs file."
5152        if self.points_num < 0:
5153            mux_file.close()
5154            raise ANUGAError, msg
5155        if self.time_step_count < 0:
5156            mux_file.close()
5157            raise ANUGAError, msg
5158        if self.time_step < 0:
5159            mux_file.close()
5160            raise ANUGAError, msg
5161
5162        # The depth is in metres; it is the distance from the ocean
5163        # surface to the sea bottom.
5164        lonlatdep = p_array.array('f')
5165        lonlatdep.read(mux_file, columns * self.points_num)
5166        lonlatdep = array(lonlatdep, typecode=Float)   
5167        lonlatdep = reshape(lonlatdep, (self.points_num, columns))
5168        #print 'lonlatdep',lonlatdep
5169        self.lonlatdep = lonlatdep
5170       
5171        self.mux_file = mux_file
5172        # check this array
5173
5174    def __iter__(self):
5175        """
5176        iterate over quantity data which is with respect to time.
5177
5178        Note: You can only interate once over an object
5179       
5180        returns quantity infomation for each time slice
5181        """
5182        msg =  "You can only interate once over a urs file."
5183        assert not self.iterated, msg
5184        self.iter_time_step = 0
5185        self.iterated = True
5186        return self
5187   
5188    def next(self):
5189        if self.time_step_count == self.iter_time_step:
5190            self.close()
5191            raise StopIteration
5192        #Read in a time slice  from mux file 
5193        hz_p_array = p_array.array('f')
5194        hz_p_array.read(self.mux_file, self.points_num)
5195        hz_p = array(hz_p_array, typecode=Float)
5196        self.iter_time_step += 1
5197       
5198        return hz_p
5199
5200    def close(self):
5201        self.mux_file.close()
5202       
5203    #### END URS UNGRIDDED 2 SWW ###
5204
5205       
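# Example (not part of the original module; the file name and helper name
# are placeholders) of reading a z-mux file with Urs_points.  Each iteration
# yields one time slice of values as a Numeric array, and the underlying
# file is closed when iteration finishes.
def _demo_read_mux(filename='event-z-mux'):
    mux = Urs_points(filename)
    print 'points:', mux.points_num, 'timesteps:', mux.time_step_count
    i = 0
    for hz in mux:                # an instance can only be iterated once
        print 'slice %d starts with %f' %(i, hz[0])
        i += 1
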
5206def start_screen_catcher(dir_name, myid='', numprocs='', extra_info='',
5207                         print_to_screen=False, verbose=False):
5208    """
5209    Used to store screen output and errors to file, if run on multiple
5210    processes eachprocessor will have its own output and error file.
5211   
5212    extra_info - is used as a string that can identify outputs with another
5213    string eg. '_other'
5214    """
5215    import sys
5217    if access(dir_name,W_OK) == 0:
5218        if verbose: print 'Make directory %s' %dir_name
5219        if verbose: print "myid", myid
5220        mkdir (dir_name,0777)
5221    if myid != '':
5222        myid = '_'+str(myid)
5223    if numprocs != '':
5224        numprocs = '_'+str(numprocs)
5225    if extra_info != '':
5226        extra_info = '_'+str(extra_info)
5227    screen_output_name = dir_name + "screen_output%s%s%s.txt" %(myid,numprocs,extra_info)
5228    screen_error_name = dir_name + "screen_error%s%s%s.txt" %(myid,numprocs,extra_info)
5229    print screen_output_name
5230    #used to catch screen output to file
5231    sys.stdout = Screen_Catcher(screen_output_name)
5232    sys.stderr = Screen_Catcher(screen_error_name)
5233
5234class Screen_Catcher:
5235    """this simply catches the screen output and stores it to file defined by
5236    start_screen_catcher (above)
5237    """
5238   
5239    def __init__(self, filename):
5240        self.filename = filename
5241 
5242        if exists(self.filename):
5243            print 'Old existing file "%s" has been deleted' %self.filename
5244            remove(self.filename)
5245
5246    def write(self, stuff):
5247        fid = open(self.filename, 'a')
5248        fid.write(stuff)
5249#        if print_to_screen: print stuff
5250
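# Example (not part of the original module; paths are placeholders) of
# redirecting stdout/stderr with start_screen_catcher.  On processor 2 of a
# 4-process run this writes to 'output/screen_output_2_4.txt' and
# 'output/screen_error_2_4.txt':
#
#     start_screen_catcher('output/', myid=2, numprocs=4)
#     print 'this now goes to the output file, not the terminal'
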
5251def copy_code_files(dir_name, filename1, filename2):
5252    """Copies "filename1" and "filename2" to "dir_name". Very useful for
5253    information management.
5254    filename1 and filename2 are both absolute pathnames.
5255    """
5256
5257    if access(dir_name,F_OK) == 0:
5258        print 'Make directory %s' %dir_name
5259        mkdir (dir_name,0777)
5260    shutil.copy(filename1, dir_name + sep + basename(filename1))
5261    shutil.copy(filename2, dir_name + sep + basename(filename2))
5262#    copy (__file__, project.output_run_time_dir + basename(__file__))
5263    print 'Files %s and %s copied' %(filename1, filename2)
5264
5265def get_data_from_file(filename,separator_value = ','):
5266    """
5267    Read in data information from a file.
5268
5269    Returns:
5270        header_fields, a list of the fields in the first line,
5271        split on the 'separator_value'
5272
5273        data, an array (M lines x N columns) of the file contents
5274        excluding the header
5275
5276    NOTE: won't deal with rows of different lengths, and there must be
5277    no blank lines at the end.
5278    """
5279   
5280    fid = open(filename)
5281    lines = fid.readlines()
5282   
5283    fid.close()
5284   
5285    header_line = lines[0]
5286    header_fields = header_line.split(separator_value)
5287
5288    #array to store data; created with typecode Float so values are floats
5289    #i'm sure there is a better way!
5290    data=array([],typecode=Float)
5291    data=resize(data,((len(lines)-1),len(header_fields)))
5292#    print 'number of fields',range(len(header_fields))
5293#    print 'number of lines',len(lines), shape(data)
5294#    print'data',data[1,1],header_line
5295
5296    array_number = 0
5297    line_number = 1
5298    while line_number < (len(lines)):
5299        for i in range(len(header_fields)): 
5300            #this gets the lines below the header, hence line_number starts at 1
5301            #(array_number is the corresponding array index)
5302            fields = lines[line_number].split(separator_value)
5303            #assign to array
5304            data[array_number,i] = float(fields[i])
5305           
5306        line_number = line_number +1
5307        array_number = array_number +1
5308       
5309    return header_fields, data
5310
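# Example (not part of the original module; the file contents are made up):
# for a file 'gauges.csv' containing
#
#     easting,northing,depth
#     308000.0,6189000.0,4.5
#     308100.0,6189000.0,5.0
#
# the call
#
#     header_fields, data = get_data_from_file('gauges.csv')
#
# returns header_fields == ['easting', 'northing', 'depth\n'] (note the
# trailing newline on the last field) and a 2x3 Float array with
# data[1,2] == 5.0.
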
5311def store_parameters(verbose=False,**kwargs):
5312    """
5313    Store "kwargs" into a temp csv file, if "completed" is a kwargs csv file is
5314    kwargs[file_name] else it is kwargs[output_dir] + details_temp.csv
5315   
5316    Must have a file_name keyword arg, this is what is writing to.
5317    might be a better way to do this using CSV module Writer and writeDict
5318   
5319    writes file to "output_dir" unless "completed" is in kwargs, then it writes to
5320    "file_name" kwargs
5321
5322    """
5323    import types
5324#    import os
5325   
5326    # Check that kwargs is a dictionary
5327    if type(kwargs) != types.DictType:
5328        raise TypeError
5329   
5330    # is 'completed' in kwargs?
5331    try:
5332        kwargs['completed']
5333        completed=True
5334    except:
5335        completed=False
5336 
5337    #get the file name and assert that a file_name exists
5338    if completed:
5339        try:
5340            file = str(kwargs['file_name'])
5341        except:
5342            raise Exception, 'kwargs must have file_name'
5343    else:
5344        #write temp file in output directory
5345        try:
5346            file = str(kwargs['output_dir'])+'detail_temp.csv'
5347        except:
5348            raise Exception, 'kwargs must have output_dir'
5349       
5350    #extracts the header info and the new line info
5351    line=''
5352    header=''
5353    count=0
5354    keys = kwargs.keys()
5355    keys.sort()
5356   
5357    #used the sorted keys to create the header and line data
5358    for k in keys:
5359#        print "%s = %s" %(k, kwargs[k])
5360        header = header+str(k)
5361        line = line+str(kwargs[k])
5362        count+=1
5363        if count <len(kwargs):
5364            header = header+','
5365            line = line+','
5366    header+='\n'
5367    line+='\n'
5368
5369    # checks the header info, if the same, then write, if not create a new file
5370    #try to open!
5371    try:
5372        fid = open(file,"r")
5373        file_header=fid.readline()
5374        fid.close()
5375        if verbose: print 'read file header %s' %file_header
5376       
5377    except:
5378        msg = 'try to create new file %s' %file
5379        if verbose: print msg
5380        #tries to open the file; maybe the directory is bad
5381        try:
5382            fid = open(file,"w")
5383            fid.write(header)
5384            fid.close()
5385            file_header=header
5386        except:
5387            msg = 'cannot create new file %s' %file
5388            raise Exception, msg
5389           
5390    #if header is same or this is a new file
5391    if file_header==str(header):
5392        fid=open(file,"a")
5393        #write new line
5394        fid.write(line)
5395        fid.close()
5396    else:
5397        #backup plan: if the header is different and "completed" is set, append
5398        #info to the end of detail_temp.csv in the output directory
5399        file = str(kwargs['output_dir'])+'detail_temp.csv'
5400        fid=open(file,"a")
5401        fid.write(header)
5402        fid.write(line)
5403        fid.close()
5404        if verbose: print 'file',file_header.strip('\n')
5405        if verbose: print 'head',header.strip('\n')
5406        if file_header.strip('\n')==str(header): print 'they equal'
5407        msg = 'WARNING: File header does not match input info; the input variables have changed. Suggest changing the file name.'
5408        print msg
5409
5410
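# Example (not part of the original module; keyword names other than
# file_name, output_dir and completed are made up) of the two
# store_parameters modes:
#
#     # While a run is in progress: appends to output/detail_temp.csv
#     store_parameters(output_dir='output/', run_name='test', nodes=4)
#
#     # Once finished: appends to the named file instead
#     store_parameters(file_name='output/results.csv', completed='yes',
#                      run_name='test', run_time=123.4)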
5411
5412
5413
5414def get_maximum_inundation_elevation(filename,
5415                                     polygon=None,
5416                                     time_interval=None,
5417                                     verbose=False):
5418   
5419    """Return highest elevation where depth > 0
5420   
5421    Usage:
5422    max_runup = get_maximum_inundation_elevation(filename,
5423                                                 polygon=None,
5424                                                 time_interval=None,
5425                                                 verbose=False)
5426
5427    filename is a NetCDF sww file containing ANUGA model output.
5428    Optional arguments polygon and time_interval restrict the maximum runup
5429    calculation to points that lie within the specified polygon and time interval.
5430
5431    If no inundation is found within polygon and time_interval the return value
5432    is None signifying "No Runup" or "Everything is dry".
5433
5434    See general function get_maximum_inundation_data for details.
5435   
5436    """
5437   
5438    runup, _ = get_maximum_inundation_data(filename,
5439                                           polygon=polygon,
5440                                           time_interval=time_interval,
5441                                           verbose=verbose)
5442    return runup
5443
5444
5445
5446
5447def get_maximum_inundation_location(filename,
5448                                    polygon=None,
5449                                    time_interval=None,
5450                                    verbose=False):
5451    """Return location of highest elevation where h > 0
5452   
5453   
5454    Usage:
5455    max_runup_location = get_maximum_inundation_location(filename,
5456                                                         polygon=None,
5457                                                         time_interval=None,
5458                                                         verbose=False)
5459
5460    filename is a NetCDF sww file containing ANUGA model output.
5461    Optional arguments polygon and time_interval restrict the maximum runup
5462    calculation to points that lie within the specified polygon and time interval.
5463
5464    If no inundation is found within polygon and time_interval the return value
5465    is None signifying "No Runup" or "Everything is dry".
5466
5467    See general function get_maximum_inundation_data for details.
5468    """
5469   
5470    _, max_loc = get_maximum_inundation_data(filename,
5471                                             polygon=polygon,
5472                                             time_interval=time_interval,
5473                                             verbose=verbose)
5474    return max_loc
5475   
5476
5477
5478def get_maximum_inundation_data(filename, polygon=None, time_interval=None,
5479                                use_centroid_values=False,
5480                                verbose=False):
5481    """Compute maximum run up height from sww file.
5482
5483
5484    Usage:
5485    runup, location = get_maximum_inundation_data(filename,
5486                                                  polygon=None,
5487                                                  time_interval=None,
5488                                                  verbose=False)
5489   
5490
5491    Algorithm is as in get_maximum_inundation_elevation from shallow_water_domain
5492    except that this function works with the sww file and computes the maximal
5493    runup height over multiple timesteps.
5494   
5495    Optional arguments polygon and time_interval restrict the maximum runup
5496    calculation to points that lie within the specified polygon and time interval.
5497    Polygon is assumed to be in (absolute) UTM coordinates in the same zone as domain.
5498
5499    If no inundation is found within polygon and time_interval the return value
5500    is None signifying "No Runup" or "Everything is dry".
5501    """
5502
5503    # We are using nodal values here as that is what is stored in sww files.
5504
5505    # Water depth below which it is considered to be 0 in the model
5506    # FIXME (Ole): Allow this to be specified as a keyword argument as well
5507
5508    from anuga.utilities.polygon import inside_polygon   
5509    from anuga.config import minimum_allowed_height
5510    from Scientific.IO.NetCDF import NetCDFFile
5511
5512    dir, base = os.path.split(filename)
5513           
5514    iterate_over = get_all_swwfiles(dir,base)
5515   
5516    # Read sww file
5517    if verbose: 
5518        print 'Reading from %s' %filename
5519        # FIXME: Use general swwstats (when done)
5520   
5521    maximal_runup = None
5522    maximal_runup_location = None
5523   
5524    for file_index, swwfile in enumerate(iterate_over):
5525       
5526        # Read sww file
5527        filename = join(dir,swwfile+'.sww')
5528       
5529        if verbose: 
5530            print 'Reading from %s' %filename
5531            # FIXME: Use general swwstats (when done)
5532               
5533        fid = NetCDFFile(filename)
5534   
5535        # Get geo_reference
5536        # sww files don't have to have a geo_ref
5537        try:
5538            geo_reference = Geo_reference(NetCDFObject=fid)
5539        except AttributeError, e:
5540            geo_reference = Geo_reference() # Default georef object
5541           
5542        xllcorner = geo_reference.get_xllcorner()
5543        yllcorner = geo_reference.get_yllcorner()
5544        zone = geo_reference.get_zone()
5545       
5546        # Get extent
5547        volumes = fid.variables['volumes'][:]   
5548        x = fid.variables['x'][:] + xllcorner
5549        y = fid.variables['y'][:] + yllcorner
5550   
5551   
5552        # Get the relevant quantities
5553        elevation = fid.variables['elevation'][:] 
5554        stage = fid.variables['stage'][:]
5555   
5556   
5557        # Here's where one could convert nodal information to centroid
5558        # information, but that is probably something we need to write in C.
5559        # Here's a Python thought which is NOT finished!!!
5560        if use_centroid_values is True:
5561            x = get_centroid_values(x, volumes)
5562            y = get_centroid_values(y, volumes)   
5563            elevation = get_centroid_values(elevation, volumes)   
5564   
5565   
5566        # Spatial restriction
5567        if polygon is not None:
5568            msg = 'polygon must be a sequence of points.'
5569            assert len(polygon[0]) == 2, msg
5570            # FIXME (Ole): Make a generic polygon input check in polygon.py and call it here
5571           
5572            points = concatenate((x[:,NewAxis], y[:,NewAxis]), axis=1)
5573   
5574            point_indices = inside_polygon(points, polygon)
5575   
5576            # Restrict quantities to polygon
5577            elevation = take(elevation, point_indices)
5578            stage = take(stage, point_indices, axis=1)
5579   
5580            # Get info for location of maximal runup
5581            points_in_polygon = take(points, point_indices)
5582            x = points_in_polygon[:,0]
5583            y = points_in_polygon[:,1]       
5584        else:
5585            # Take all points
5586            point_indices = arange(len(x))
5587           
5588   
5589        # Temporal restriction
5590        time = fid.variables['time'][:]
5591        all_timeindices = arange(len(time))       
5592        if time_interval is not None:
5593           
5594            msg = 'time_interval must be a sequence of length 2.'
5595            assert len(time_interval) == 2, msg
5596            msg = 'time_interval %s must not be decreasing.' %(time_interval)       
5597            assert time_interval[1] >= time_interval[0], msg
5598           
5599            msg = 'Specified time interval [%.8f:%.8f]' %tuple(time_interval)
5600            msg += ' does not match model time interval: [%.8f, %.8f]\n'\
5601                   %(time[0], time[-1])
5602            if time_interval[1] < time[0]: raise ValueError(msg)
5603            if time_interval[0] > time[-1]: raise ValueError(msg)
5604   
5605            # Take time indices corresponding to interval (& is bitwise AND)
5606            timesteps = compress((time_interval[0] <= time) & (time <= time_interval[1]),
5607                                 all_timeindices)
5608   
5609   
5610            msg = 'time_interval %s did not include any model timesteps.' %(time_interval)       
5611            assert not alltrue(timesteps == 0), msg
5612   
5613   
5614        else:
5615            # Take them all
5616            timesteps = all_timeindices
5617       
5618   
5619        fid.close()
5620   
5621        # Compute maximal runup for each timestep
5622        #maximal_runup = None
5623        #maximal_runup_location = None
5624        #maximal_runups = [None]
5625        #maximal_runup_locations = [None]
5626       
5627        for i in timesteps:
5628            if use_centroid_values is True:
5629                stage_i = get_centroid_values(stage[i,:], volumes)               
5630            else:
5631                stage_i = stage[i,:]
5632               
5633            depth = stage_i  - elevation
5634       
5635            # Get wet nodes i.e. nodes with depth>0 within given region and timesteps
5636            wet_nodes = compress(depth > minimum_allowed_height, arange(len(depth)))
5637   
5638            if alltrue(wet_nodes == 0):
5639                runup = None
5640            else:   
5641                # Find maximum elevation among wet nodes
5642                wet_elevation = take(elevation, wet_nodes)
5643   
5644                runup_index = argmax(wet_elevation)
5645                runup = max(wet_elevation)
5646                assert wet_elevation[runup_index] == runup # Must always be True
5647            if runup > maximal_runup:
5648                maximal_runup = runup      # This works even if maximal_runup is None
5649                #print "NEW RUNUP",runup
5650   
5651                # Record location
5652                wet_x = take(x, wet_nodes)
5653                wet_y = take(y, wet_nodes)           
5654                maximal_runup_location = [wet_x[runup_index], wet_y[runup_index]]
5655   
5656    #print 'maximal_runup, maximal_runup_location',maximal_runup, maximal_runup_location
5657    return maximal_runup, maximal_runup_location
5658
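# Example (not part of the original module; the file name, polygon and time
# interval are placeholders) of querying runup from an sww file:
#
#     polygon = [[300000, 6180000], [310000, 6180000],
#                [310000, 6190000], [300000, 6190000]]
#     runup = get_maximum_inundation_elevation('runup.sww',
#                                              polygon=polygon,
#                                              time_interval=[0, 3600])
#     location = get_maximum_inundation_location('runup.sww',
#                                                polygon=polygon)
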
5659def get_all_swwfiles(look_in_dir='',base_name='',verbose=False):
5660    '''
5661    Finds all the sww files in "look_in_dir" whose names contain "base_name".
5662    Will accept base_name with or without the extension ".sww".
5663   
5664    Returns: a list of strings
5665       
5666    Usage:     iterate_over = get_all_swwfiles(dir, name)
5667    then
5668               for swwfile in iterate_over:
5669                   do stuff
5670                   
5671    Check "export_grids" and "get_maximum_inundation_data" for examples
5672    '''
5673   
5674    # split off and test the extension
5675    name, extension = os.path.splitext(base_name)
5676
5677    if extension != '' and extension != '.sww':
5678        msg = 'file %s %s must be a NetCDF sww file!' %(base_name, extension)
5679        raise IOError, msg
5680
5681    if look_in_dir == "":
5682        look_in_dir = "." # Unix compatibility
5683   
5684    dir_ls = os.listdir(look_in_dir)
5685    #print 'dir_ls',dir_ls, base
5686    iterate_over = [x[:-4] for x in dir_ls if name in x and x[-4:] == '.sww']
5687    if len(iterate_over) == 0:
5688        msg = 'No files with the base name %s'\
5689              %(name)
5690        raise IOError, msg
5691    if verbose: print 'iterate over %s' %(iterate_over)
5692
5693    return iterate_over
5694
5695
5696
5697#-------------------------------------------------------------
5698if __name__ == "__main__":
5699    # Setting umask from config to force permissions for all files and
5700    # directories created to be the same. (It was noticed that "mpirun"
5701    # doesn't honour the umask set in your .bashrc etc file.)
5702    from config import umask
5703    import os 
5704    os.umask(umask)
5705