source: anuga_core/source/anuga/shallow_water/data_manager.py @ 4031

"""data_manager.py - input output for AnuGA


This module takes care of reading and writing datafiles such as topographies,
model output, etc


Formats used within AnuGA:

.sww: NetCDF format for storing model output f(t,x,y)
.tms: NetCDF format for storing time series f(t)

.xya: ASCII format for storing arbitrary points and associated attributes
.pts: NetCDF format for storing arbitrary points and associated attributes

.asc: ASCII format of regular DEMs as output from ArcView
.prj: Associated ArcView file giving more meta data for asc format
.ers: ERMapper header format of regular DEMs for ArcView

.dem: NetCDF representation of regular DEM data

.tsh: ASCII format for storing meshes and associated boundary and region info
.msh: NetCDF format for storing meshes and associated boundary and region info

.nc: Native ferret NetCDF format
.geo: Houdinis ascii geometry format (?)


A typical dataflow can be described as follows

Manually created files:
ASC, PRJ:     Digital elevation models (gridded)
TSH:          Triangular meshes (e.g. created from anuga.pmesh)
NC:           Model outputs for use as boundary conditions (e.g. from MOST)


AUTOMATICALLY CREATED FILES:

ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEMs to native pts file

NC -> SWW: Conversion of MOST boundary files to boundary sww

PTS + TSH -> TSH with elevation: Least squares fit

TSH -> SWW: Conversion of TSH to sww viewable using Swollen

TSH + Boundary SWW -> SWW: Simulation using abstract_2d_finite_volumes

"""

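# A minimal sketch of the pts part of the above dataflow, using conversion
# functions defined later in this module (file names are hypothetical):
#
#     dem2pts('mydem')                       # mydem.dem -> mydem.pts
#     xya2pts('mypoints.xya', verbose=True)  # mypoints.xya -> mypoints.pts
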
import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import csv
import os
from struct import unpack
import array as p_array

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around, reshape, transpose
from Scientific.IO.NetCDF import NetCDFFile


from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.utilities.numerical_tools import ensure_numeric
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError

def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s

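# Example (sketch):
#     make_filename(' My Run (final) ')  ->  'My_Run_final'
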

def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return path

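# Example (sketch; creates the directory if it is missing and returns the
# path with a trailing separator):
#     datadir = check_dir('~/anuga_output', verbose=True)
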


def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)

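# Note: this is essentially the standard library's shutil.rmtree(path),
# except that failure to remove an individual file is reported and skipped
# rather than raised.
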


def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from anuga.config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN

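# Example (sketch; check_dir will create 'data' if it does not exist):
#     create_filename('data', 'run1', 'sww', size=4, time=1.5)
#     # -> 'data/run1_size4_time1.50.sww'
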

def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from anuga.config import data_dir

    dir = check_dir(datadir)  #Includes trailing separator

    #Match names produced by create_filename
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
                                        '''   'w' (write)'''+\
                                        '''   'r' (read)''' +\
                                        '''   'a' (append)'''

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.domain = domain


        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)


        #FIXME: Should we have a general set_precision function?


#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                #fid.createDimension('number_of_points', len(domain.vertexlist))
                fid.createDimension('number_of_points', self.domain.number_of_full_nodes)

                # FIXME(Ole): This will cause sww files for parallel domains to
                # have ghost nodes stored (but not used by triangles).
                # To clean this up, we have to change get_vertex_values and friends in
                # quantity.py (but I can't be bothered right now)
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)

        volumes[:] = V.astype(volumes.typecode())
        x[:] = X
        y[:] = Y
        z[:] = Z

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z
        ################################


        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename+filename_ext)


            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)


                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]


                    A = choose(A-z[:] >= self.minimum_storable_height,
                               (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()


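# Typical (sketch) usage of Data_format_sww, assuming an initialised Domain
# instance with 'stage', 'xmomentum' and 'ymomentum' quantities:
#
#     sww = Data_format_sww(domain, mode='w')
#     sww.store_connectivity()
#     sww.store_timestep(['stage', 'xmomentum', 'ymomentum'])
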

#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity of the triangles
        constituting the mesh.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)


        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #Note: the cpt file as defined above has no elevation variable,
        #so Z is not stored here

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        quantity = fid.variables[name]
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        quantity[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()




#Function for storing xya output
#FIXME Not done yet for this version
#This is obsolete.  Use geo_spatial_data instead
class Data_format_xya(Data_format):
    """Generic interface to data formats
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store_all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes are assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file
        """
        pass


#### NED is national exposure database

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self,file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.


        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string att's.

        So just use geospatial to hold the x, y and georef? Bad, since
        different att's are in different structures.  Not so bad, the info
        to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.


        """
        self._file_name = file_name
        self._geospatial = None

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
        self._load_exposure_csv(self._file_name)
        try:
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if is_x_y_locations is not False:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                 longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #fixme throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #check that a is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result <> 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result <> 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic, \
                    other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result <> 0:
                    return result
            return 0
        else:
            return 1

    def _load_exposure_csv(self, file_name):
        """
        Load in the csv as a dic, title as key and column info as value.
        Also, create a dic, title as key and column index as value.
        """
        #
        attribute_dic = {}
        title_index_dic = {}
        titles_stripped = [] # list of titles

        reader = csv.reader(file(file_name))

        # Read in and manipulate the title info
        titles = reader.next()
        for i,title in enumerate(titles):
            titles_stripped.append(title.strip())
            title_index_dic[title.strip()] = i
        title_count = len(titles_stripped)
        #print "title_index_dic",title_index_dic


        #create a dic of column values, indexed by column title
        for line in reader:
            if len(line) <> title_count:
                raise IOError #FIXME make this nicer
            for i, value in enumerate(line):
                attribute_dic.setdefault(titles_stripped[i],[]).append(value)

        return attribute_dic, title_index_dic

    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' %column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column_name column, using the
        known_values, to return a value of the value_column_name column.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) <> value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' %column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name,'wb')
        writer = csv.writer(fd)

        #Write the title to a csv file
        line = [None]* len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]]= title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]* len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]]= \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

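# Sketch of typical Exposure_csv usage (file name and column titles are
# hypothetical):
#
#     exposure = Exposure_csv('exposure.csv')
#     depths = [float(x) for x in exposure.get_column('DEPTH')]
#     exposure.set_column('DEPTH_X2', ['%.2f' %(2*d) for d in depths])
#     exposure.save('exposure_out.csv')
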

#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()

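# Example (sketch): write a single flat triangle as an obj file.
#
#     from Numeric import array, Float
#     x = array([[0.0, 1.0, 0.0]], Float)
#     y = array([[0.0, 0.0, 1.0]], Float)
#     z = array([[0.0, 0.0, 0.0]], Float)
#     write_obj('triangle', x, y, z)    # creates triangle.obj
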

#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)


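# Example (sketch; assumes ./run1_size4.sww exists and holds 4 triangles):
#     sww2obj('run1', 4)
#     # writes run1_size4.obj plus one obj file per stored timestep
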
def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info
    #and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(filename, 'r').readlines()  #glob returns full paths
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Hmmmm'


        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

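# Example (sketch): keep every 10th timestep of a large sww file.
#     filter_netcdf('run1.sww', 'run1_coarse.sww', step=10)
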

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

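# Example (sketch): with domain.format == 'sww' this resolves to
# Data_format_sww, so the following two calls are equivalent:
#     writer = get_dataobject(domain)
#     writer = Data_format_sww(domain, mode='w')
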
#FIXME move into geospatial.  There should only be one method that
# reads xya, and writes pts.
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get NetCDF
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)


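# Example (sketch): convert every second point, scaling elevations by 100.
#     xya2pts('mypoints.xya', stride=2, z_func=lambda z: 100*z)
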

######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from anuga.coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()

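# Example (sketch, using the obsolete writer directly):
#     write_ptsfile('two_points.pts',
#                   [[0.0, 0.0], [1.0, 0.0]],   # points
#                   [10.0, 12.5],               # attribute values
#                   attribute_name='elevation')
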

def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False,):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

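# Example (sketch; assumes topo.dem exists): clip to a bounding box while
# converting, and cache the result for subsequent runs.
#     dem2pts('topo', easting_min=722000, easting_max=725000,
#             northing_min=5893000, northing_max=5895000,
#             use_cache=True, verbose=True)
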

def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] <> NODATA_value:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if line.strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if line.strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])


1605
1606def hecras_cross_sections2pts(basename_in,
1607                              basename_out=None,
1608                              verbose=False):
1609    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1610
1611    Example:
1612
1613
1614# RAS export file created on Mon 15Aug2005 11:42
1615# by HEC-RAS Version 3.1.1
1616
1617BEGIN HEADER:
1618  UNITS: METRIC
1619  DTM TYPE: TIN
1620  DTM: v:\1\cit\perth_topo\river_tin
1621  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1622  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1623  MAP PROJECTION: UTM
1624  PROJECTION ZONE: 50
1625  DATUM: AGD66
1626  VERTICAL DATUM:
1627  NUMBER OF REACHES:  19
1628  NUMBER OF CROSS-SECTIONS:  14206
1629END HEADER:
1630
1631
1632Only the SURFACE LINE data of the following form will be utilised
1633
1634  CROSS-SECTION:
1635    STREAM ID:Southern-Wungong
1636    REACH ID:Southern-Wungong
1637    STATION:19040.*
1638    CUT LINE:
1639      405548.671603161 , 6438142.7594925
1640      405734.536092045 , 6438326.10404912
1641      405745.130459356 , 6438331.48627354
1642      405813.89633823 , 6438368.6272789
1643    SURFACE LINE:
1644     405548.67,   6438142.76,   35.37
1645     405552.24,   6438146.28,   35.41
1646     405554.78,   6438148.78,   35.44
1647     405555.80,   6438149.79,   35.44
1648     405559.37,   6438153.31,   35.45
1649     405560.88,   6438154.81,   35.44
1650     405562.93,   6438156.83,   35.42
1651     405566.50,   6438160.35,   35.38
1652     405566.99,   6438160.83,   35.37
1653     ...
1654   END CROSS-SECTION
1655
1656    Convert to NetCDF pts format which is
1657
1658    points:  (Nx2) Float array
1659    elevation: N Float array
1660    """
1661
1662    #FIXME: Can this be written feasibly using write_pts?
1663
1664    import os
1665    from Scientific.IO.NetCDF import NetCDFFile
1666    from Numeric import Float, zeros, reshape
1667
1668    root = basename_in
1669
1670    #Get ASCII file
1671    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1672
1673    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1674
1675    lines = infile.readlines()
1676    infile.close()
1677
1678    if verbose: print 'Converting to pts format'
1679
1680    i = 0
1681    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1682        i += 1
1683
1684    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1685    i += 1
1686
1687    assert lines[i].strip().upper().startswith('UNITS:')
1688    units = lines[i].strip().split()[1]
1689    i += 1
1690
1691    assert lines[i].strip().upper().startswith('DTM TYPE:')
1692    i += 1
1693
1694    assert lines[i].strip().upper().startswith('DTM:')
1695    i += 1
1696
1697    assert lines[i].strip().upper().startswith('STREAM')
1698    i += 1
1699
1700    assert lines[i].strip().upper().startswith('CROSS')
1701    i += 1
1702
1703    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1704    projection = lines[i].strip().split(':')[1]
1705    i += 1
1706
1707    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1708    zone = int(lines[i].strip().split(':')[1])
1709    i += 1
1710
1711    assert lines[i].strip().upper().startswith('DATUM:')
1712    datum = lines[i].strip().split(':')[1]
1713    i += 1
1714
1715    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1716    i += 1
1717
1718    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1719    i += 1
1720
1721    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1722    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1723    i += 1
1724
1725
1726    #Now read all points
1727    points = []
1728    elevation = []
1729    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1730        for k, entry in enumerate(entries):
1731            points.append(entry[:2])
1732            elevation.append(entry[2])
1733
1734
    msg = 'Actual number of cross sections %d does not match the reported %d'\
          %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg
1738
1739    #Get output file
    if basename_out is None:
1741        ptsname = root + '.pts'
1742    else:
1743        ptsname = basename_out + '.pts'
1744
1745    #FIXME (DSG-ON): use loadASCII export_points_file
1746    if verbose: print 'Store to NetCDF file %s' %ptsname
1747    # NetCDF file definition
1748    outfile = NetCDFFile(ptsname, 'w')
1749
1750    #Create new file
1751    outfile.institution = 'Geoscience Australia'
1752    outfile.description = 'NetCDF pts format for compact and portable ' +\
1753                          'storage of spatial point data derived from HEC-RAS'
1754
1755    #Georeferencing
1756    outfile.zone = zone
1757    outfile.xllcorner = 0.0
1758    outfile.yllcorner = 0.0
1759    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1760    outfile.false_northing = 1000000
1761
1762    outfile.projection = projection
1763    outfile.datum = datum
1764    outfile.units = units
1765
1766
1767    outfile.createDimension('number_of_points', len(points))
1768    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1769
1770    # variable definitions
1771    outfile.createVariable('points', Float, ('number_of_points',
1772                                             'number_of_dimensions'))
1773    outfile.createVariable('elevation', Float, ('number_of_points',))
1774
1775    # Get handles to the variables
1776    outfile.variables['points'][:] = points
1777    outfile.variables['elevation'][:] = elevation
1778
1779    outfile.close()
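

def _example_hecras_cross_sections2pts():
    #Usage sketch (not called anywhere): 'perth_xs' is a hypothetical
    #basename; the call reads perth_xs.sdf and writes perth_xs_points.pts.
    hecras_cross_sections2pts('perth_xs',
                              basename_out='perth_xs_points',
                              verbose=True)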
1780
1781
1782
1783def sww2dem(basename_in, basename_out = None,
1784            quantity = None,
1785            timestep = None,
1786            reduction = None,
1787            cellsize = 10,
1788            NODATA_value = -9999,
1789            easting_min = None,
1790            easting_max = None,
1791            northing_min = None,
1792            northing_max = None,
1793            verbose = False,
1794            origin = None,
1795            datum = 'WGS84',
            format = 'ers'):
1797
1798    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1799
1800    Example (ASC):
1801
1802    ncols         3121
1803    nrows         1800
1804    xllcorner     722000
1805    yllcorner     5893000
1806    cellsize      25
1807    NODATA_value  -9999
1808    138.3698 137.4194 136.5062 135.5558 ..........
1809
    For the 'asc' format, also write an accompanying file with same
    basename_out but extension .prj used to fix the UTM zone, datum,
    false northings and eastings.
1812
1813    The prj format is assumed to be as
1814
1815    Projection    UTM
1816    Zone          56
1817    Datum         WGS84
1818    Zunits        NO
1819    Units         METERS
1820    Spheroid      WGS84
1821    Xshift        0.0000000000
1822    Yshift        10000000.0000000000
1823    Parameters
1824
1825
1826    The parameter quantity must be the name of an existing quantity or
1827    an expression involving existing quantities. The default is
1828    'elevation'.
1829
1830    if timestep (an index) is given, output quantity at that timestep
1831
1832    if reduction is given use that to reduce quantity over all timesteps.
1833
    datum specifies the geodetic datum written to the output (default 'WGS84')
1835
1836    format can be either 'asc' or 'ers'
1837    """
1838
1839    import sys
1840    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1841    from Numeric import array2string
1842
1843    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1844    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
1845
1846    msg = 'Format must be either asc or ers'
1847    assert format.lower() in ['asc', 'ers'], msg
1848
1849
1850    false_easting = 500000
1851    false_northing = 10000000
1852
1853    if quantity is None:
1854        quantity = 'elevation'
1855
1856    if reduction is None:
1857        reduction = max
1858
1859    if basename_out is None:
1860        basename_out = basename_in + '_%s' %quantity
1861
1862    swwfile = basename_in + '.sww'
1863    demfile = basename_out + '.' + format
    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option)
1866
1867    #Read sww file
1868    if verbose: print 'Reading from %s' %swwfile
1869    from Scientific.IO.NetCDF import NetCDFFile
1870    fid = NetCDFFile(swwfile)
1871
1872    #Get extent and reference
1873    x = fid.variables['x'][:]
1874    y = fid.variables['y'][:]
1875    volumes = fid.variables['volumes'][:]
1876    if timestep is not None:
1877        times = fid.variables['time'][timestep]
1878    else:
1879        times = fid.variables['time'][:]
1880
1881    number_of_timesteps = fid.dimensions['number_of_timesteps']
1882    number_of_points = fid.dimensions['number_of_points']
1883   
1884    if origin is None:
1885
1886        #Get geo_reference
1887        #sww files don't have to have a geo_ref
1888        try:
1889            geo_reference = Geo_reference(NetCDFObject=fid)
1890        except AttributeError, e:
1891            geo_reference = Geo_reference() #Default georef object
1892
1893        xllcorner = geo_reference.get_xllcorner()
1894        yllcorner = geo_reference.get_yllcorner()
1895        zone = geo_reference.get_zone()
1896    else:
1897        zone = origin[0]
1898        xllcorner = origin[1]
1899        yllcorner = origin[2]
1900
1901
1902
1903    #FIXME: Refactor using code from file_function.statistics
1904    #Something like print swwstats(swwname)
1905    if verbose:
1906        print '------------------------------------------------'
1907        print 'Statistics of SWW file:'
1908        print '  Name: %s' %swwfile
1909        print '  Reference:'
1910        print '    Lower left corner: [%f, %f]'\
1911              %(xllcorner, yllcorner)
1912        if timestep is not None:
1913            print '    Time: %f' %(times)
1914        else:
1915            print '    Start time: %f' %fid.starttime[0]
1916        print '  Extent:'
1917        print '    x [m] in [%f, %f], len(x) == %d'\
1918              %(min(x.flat), max(x.flat), len(x.flat))
1919        print '    y [m] in [%f, %f], len(y) == %d'\
1920              %(min(y.flat), max(y.flat), len(y.flat))
1921        if timestep is not None:
1922            print '    t [s] = %f, len(t) == %d' %(times, 1)
1923        else:
1924            print '    t [s] in [%f, %f], len(t) == %d'\
1925              %(min(times), max(times), len(times))
1926        print '  Quantities [SI units]:'
1927        for name in ['stage', 'xmomentum', 'ymomentum']:
1928            q = fid.variables[name][:].flat
1929            if timestep is not None:
1930                q = q[timestep*len(x):(timestep+1)*len(x)]
1931            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1932        for name in ['elevation']:
1933            q = fid.variables[name][:].flat
1934            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1935           
1936    # Get quantity and reduce if applicable
1937    if verbose: print 'Processing quantity %s' %quantity
1938
1939    # Turn NetCDF objects into Numeric arrays
1940    quantity_dict = {}
1941    for name in fid.variables.keys():
1942        quantity_dict[name] = fid.variables[name][:] 
1943
1944
1945    # Convert quantity expression to quantities found in sww file   
1946    q = apply_expression_to_dictionary(quantity, quantity_dict)
1947
1948    if len(q.shape) == 2:
1949        #q has a time component and needs to be reduced along
1950        #the temporal dimension
1951        if verbose: print 'Reducing quantity %s' %quantity
1952        q_reduced = zeros( number_of_points, Float )
1953       
1954        if timestep is not None:
1955            for k in range(number_of_points):
1956                q_reduced[k] = q[timestep,k] 
1957        else:
1958            for k in range(number_of_points):
1959                q_reduced[k] = reduction( q[:,k] )
1960
1961        q = q_reduced
1962
1963    #Post condition: Now q has dimension: number_of_points
1964    assert len(q.shape) == 1
1965    assert q.shape[0] == number_of_points
1966
1967
1968    if verbose:
1969        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1970
1971
1972    #Create grid and update xll/yll corner and x,y
1973
1974    #Relative extent
1975    if easting_min is None:
1976        xmin = min(x)
1977    else:
1978        xmin = easting_min - xllcorner
1979
1980    if easting_max is None:
1981        xmax = max(x)
1982    else:
1983        xmax = easting_max - xllcorner
1984
1985    if northing_min is None:
1986        ymin = min(y)
1987    else:
1988        ymin = northing_min - yllcorner
1989
1990    if northing_max is None:
1991        ymax = max(y)
1992    else:
1993        ymax = northing_max - yllcorner
1994
1995
1996
1997    if verbose: print 'Creating grid'
1998    ncols = int((xmax-xmin)/cellsize)+1
1999    nrows = int((ymax-ymin)/cellsize)+1
2000
2001
2002    #New absolute reference and coordinates
2003    newxllcorner = xmin+xllcorner
2004    newyllcorner = ymin+yllcorner
2005
2006    x = x+xllcorner-newxllcorner
2007    y = y+yllcorner-newyllcorner
2008   
2009    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2010    assert len(vertex_points.shape) == 2
2011
2012    grid_points = zeros ( (ncols*nrows, 2), Float )
2013
2014
2015    for i in xrange(nrows):
2016        if format.lower() == 'asc':
2017            yg = i*cellsize
        else:
            #This will flip the order of the y values for ers
            yg = (nrows-i)*cellsize
2021
2022        for j in xrange(ncols):
2023            xg = j*cellsize
2024            k = i*ncols + j
2025
2026            grid_points[k,0] = xg
2027            grid_points[k,1] = yg
2028
2029    #Interpolate
2030    #from least_squares import Interpolation
2031    from anuga.fit_interpolate.interpolate import Interpolate
2032
2033    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2034
2035    #Interpolate using quantity values
2036    if verbose: print 'Interpolating'
2037    grid_values = interp.interpolate(q, grid_points).flat
2038
2039
2040    if verbose:
2041        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2042                                                      max(grid_values))
2043
2044    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2045    P = interp.mesh.get_boundary_polygon()
2046    outside_indices = outside_polygon(grid_points, P, closed=True)
2047
2048    for i in outside_indices:
2049        grid_values[i] = NODATA_value
2050
2051
2052
2053
2054    if format.lower() == 'ers':
2055        # setup ERS header information
2056        grid_values = reshape(grid_values,(nrows, ncols))
2057        header = {}
2058        header['datum'] = '"' + datum + '"'
2059        # FIXME The use of hardwired UTM and zone number needs to be made optional
2060        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2061        header['projection'] = '"UTM-' + str(zone) + '"'
2062        header['coordinatetype'] = 'EN'
2063        if header['coordinatetype'] == 'LL':
2064            header['longitude'] = str(newxllcorner)
2065            header['latitude'] = str(newyllcorner)
2066        elif header['coordinatetype'] == 'EN':
2067            header['eastings'] = str(newxllcorner)
2068            header['northings'] = str(newyllcorner)
2069        header['nullcellvalue'] = str(NODATA_value)
2070        header['xdimension'] = str(cellsize)
2071        header['ydimension'] = str(cellsize)
2072        header['value'] = '"' + quantity + '"'
2073        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2074
2075
2076        #Write
2077        if verbose: print 'Writing %s' %demfile
2078        import ermapper_grids
2079        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2080
2081        fid.close()
2082    else:
2083        #Write to Ascii format
2084
2085        #Write prj file
2086        prjfile = basename_out + '.prj'
2087
2088        if verbose: print 'Writing %s' %prjfile
2089        prjid = open(prjfile, 'w')
2090        prjid.write('Projection    %s\n' %'UTM')
2091        prjid.write('Zone          %d\n' %zone)
2092        prjid.write('Datum         %s\n' %datum)
2093        prjid.write('Zunits        NO\n')
2094        prjid.write('Units         METERS\n')
2095        prjid.write('Spheroid      %s\n' %datum)
2096        prjid.write('Xshift        %d\n' %false_easting)
2097        prjid.write('Yshift        %d\n' %false_northing)
2098        prjid.write('Parameters\n')
2099        prjid.close()
2100
2101
2102
2103        if verbose: print 'Writing %s' %demfile
2104
2105        ascid = open(demfile, 'w')
2106
2107        ascid.write('ncols         %d\n' %ncols)
2108        ascid.write('nrows         %d\n' %nrows)
2109        ascid.write('xllcorner     %d\n' %newxllcorner)
2110        ascid.write('yllcorner     %d\n' %newyllcorner)
2111        ascid.write('cellsize      %f\n' %cellsize)
2112        ascid.write('NODATA_value  %d\n' %NODATA_value)
2113
2114
2115        #Get bounding polygon from mesh
2116        #P = interp.mesh.get_boundary_polygon()
2117        #inside_indices = inside_polygon(grid_points, P)
2118
2119        for i in range(nrows):
2120            if verbose and i%((nrows+10)/10)==0:
2121                print 'Doing row %d of %d' %(i, nrows)
2122
2123            base_index = (nrows-i-1)*ncols
2124
            row_values = grid_values[base_index:base_index+ncols]
            s = array2string(row_values, max_line_width=sys.maxint)
2127            ascid.write(s[1:-1] + '\n')
2128
2129
2136
2137        #Close
2138        ascid.close()
2139        fid.close()
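

def _example_sww2dem():
    #Usage sketch (not called anywhere): filenames and extent are
    #hypothetical. Reduce stage over all timesteps with max and write an
    #ArcView ASCII grid (plus .prj file) clipped to a bounding box.
    sww2dem('bedslope',                      #Reads bedslope.sww
            basename_out='bedslope_stage',   #Writes bedslope_stage.asc/.prj
            quantity='stage',
            reduction=max,
            cellsize=25,
            easting_min=722000, easting_max=724000,
            northing_min=5893000, northing_max=5895000,
            datum='WGS84',
            format='asc')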
2140
2141#Backwards compatibility
2142def sww2asc(basename_in, basename_out = None,
2143            quantity = None,
2144            timestep = None,
2145            reduction = None,
2146            cellsize = 10,
2147            verbose = False,
2148            origin = None):
2149    print 'sww2asc will soon be obsoleted - please use sww2dem'
2150    sww2dem(basename_in,
2151            basename_out = basename_out,
2152            quantity = quantity,
2153            timestep = timestep,
2154            reduction = reduction,
2155            cellsize = cellsize,
2156            verbose = verbose,
2157            origin = origin,
            datum = 'WGS84',
            format = 'asc')
2160
2161def sww2ers(basename_in, basename_out = None,
2162            quantity = None,
2163            timestep = None,
2164            reduction = None,
2165            cellsize = 10,
2166            verbose = False,
2167            origin = None,
2168            datum = 'WGS84'):
2169    print 'sww2ers will soon be obsoleted - please use sww2dem'
2170    sww2dem(basename_in,
2171            basename_out = basename_out,
2172            quantity = quantity,
2173            timestep = timestep,
2174            reduction = reduction,
2175            cellsize = cellsize,
2176            verbose = verbose,
2177            origin = origin,
2178            datum = datum,
2179            format = 'ers')
2180################################# END COMPATIBILITY ##############
2181
2182
2183
2184def sww2pts(basename_in, basename_out=None,
2185            data_points=None,
2186            quantity=None,
2187            timestep=None,
2188            reduction=None,
2189            NODATA_value=-9999,
2190            verbose=False,
2191            origin=None):
2193
2194
2195    """Read SWW file and convert to interpolated values at selected points
2196
    The parameter 'quantity' must be the name of an existing quantity or
2198    an expression involving existing quantities. The default is
2199    'elevation'.
2200
2201    if timestep (an index) is given, output quantity at that timestep
2202
2203    if reduction is given use that to reduce quantity over all timesteps.
2204
    data_points (Nx2 array) gives the locations of the points where the quantity is to be computed.
2206   
2207    """
2208
2209    import sys
2210    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2211    from Numeric import array2string
2212
2213    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2214    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
2215
2216    from anuga.geospatial_data.geospatial_data import Geospatial_data
2217
2218    if quantity is None:
2219        quantity = 'elevation'
2220
2221    if reduction is None:
2222        reduction = max
2223
2224    if basename_out is None:
2225        basename_out = basename_in + '_%s' %quantity
2226
2227    swwfile = basename_in + '.sww'
2228    ptsfile = basename_out + '.pts'
2229
2230    # Read sww file
2231    if verbose: print 'Reading from %s' %swwfile
2232    from Scientific.IO.NetCDF import NetCDFFile
2233    fid = NetCDFFile(swwfile)
2234
2235    # Get extent and reference
2236    x = fid.variables['x'][:]
2237    y = fid.variables['y'][:]
2238    volumes = fid.variables['volumes'][:]
2239
2240    number_of_timesteps = fid.dimensions['number_of_timesteps']
2241    number_of_points = fid.dimensions['number_of_points']
2242    if origin is None:
2243
2244        # Get geo_reference
2245        # sww files don't have to have a geo_ref
2246        try:
2247            geo_reference = Geo_reference(NetCDFObject=fid)
2248        except AttributeError, e:
2249            geo_reference = Geo_reference() #Default georef object
2250
2251        xllcorner = geo_reference.get_xllcorner()
2252        yllcorner = geo_reference.get_yllcorner()
2253        zone = geo_reference.get_zone()
2254    else:
2255        zone = origin[0]
2256        xllcorner = origin[1]
2257        yllcorner = origin[2]
2258
2259
2260
2261    # FIXME: Refactor using code from file_function.statistics
2262    # Something like print swwstats(swwname)
2263    if verbose:
2264        x = fid.variables['x'][:]
2265        y = fid.variables['y'][:]
2266        times = fid.variables['time'][:]
2267        print '------------------------------------------------'
2268        print 'Statistics of SWW file:'
2269        print '  Name: %s' %swwfile
2270        print '  Reference:'
2271        print '    Lower left corner: [%f, %f]'\
2272              %(xllcorner, yllcorner)
2273        print '    Start time: %f' %fid.starttime[0]
2274        print '  Extent:'
2275        print '    x [m] in [%f, %f], len(x) == %d'\
2276              %(min(x.flat), max(x.flat), len(x.flat))
2277        print '    y [m] in [%f, %f], len(y) == %d'\
2278              %(min(y.flat), max(y.flat), len(y.flat))
2279        print '    t [s] in [%f, %f], len(t) == %d'\
2280              %(min(times), max(times), len(times))
2281        print '  Quantities [SI units]:'
2282        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2283            q = fid.variables[name][:].flat
2284            print '    %s in [%f, %f]' %(name, min(q), max(q))
2285
2286
2287
2288    # Get quantity and reduce if applicable
2289    if verbose: print 'Processing quantity %s' %quantity
2290
2291    # Turn NetCDF objects into Numeric arrays
2292    quantity_dict = {}
2293    for name in fid.variables.keys():
2294        quantity_dict[name] = fid.variables[name][:]
2295
2296
2297
2298    # Convert quantity expression to quantities found in sww file   
2299    q = apply_expression_to_dictionary(quantity, quantity_dict)
2300
2301
2302
    if len(q.shape) == 2:
        # q has a time component and needs to be reduced along
        # the temporal dimension
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        if timestep is not None:
            # Output quantity at the given timestep as per the docstring
            for k in range(number_of_points):
                q_reduced[k] = q[timestep,k]
        else:
            for k in range(number_of_points):
                q_reduced[k] = reduction( q[:,k] )

        q = q_reduced
2313
2314    # Post condition: Now q has dimension: number_of_points
2315    assert len(q.shape) == 1
2316    assert q.shape[0] == number_of_points
2317
2318
2319    if verbose:
2320        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2321
2322
2323    # Create grid and update xll/yll corner and x,y
2324    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2325    assert len(vertex_points.shape) == 2
2326
2327    # Interpolate
2328    from anuga.fit_interpolate.interpolate import Interpolate
2329    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2330
2331    # Interpolate using quantity values
2332    if verbose: print 'Interpolating'
2333    interpolated_values = interp.interpolate(q, data_points).flat
2334
2335
2336    if verbose:
2337        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2338                                                      max(interpolated_values))
2339
2340    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2341    P = interp.mesh.get_boundary_polygon()
2342    outside_indices = outside_polygon(data_points, P, closed=True)
2343
2344    for i in outside_indices:
2345        interpolated_values[i] = NODATA_value
2346
2347
2348    # Store results   
2349    G = Geospatial_data(data_points=data_points,
2350                        attributes=interpolated_values)
2351
2352    G.export_points_file(ptsfile, absolute = True)
2353
2354    fid.close()
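

def _example_sww2pts():
    #Usage sketch (not called anywhere): filename and points are
    #hypothetical. Interpolate maximal stage onto two absolute UTM points
    #and store them as bedslope_stage.pts.
    sww2pts('bedslope',                          #Reads bedslope.sww
            data_points=[[722510.0, 5893250.0],
                         [722620.0, 5893330.0]],
            quantity='stage',
            reduction=max)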
2355
2356
2357def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2358                                  use_cache = False,
2359                                  verbose = False):
2360    """Read Digitial Elevation model from the following ASCII format (.asc)
2361
2362    Example:
2363
2364    ncols         3121
2365    nrows         1800
2366    xllcorner     722000
2367    yllcorner     5893000
2368    cellsize      25
2369    NODATA_value  -9999
2370    138.3698 137.4194 136.5062 135.5558 ..........
2371
2372    Convert basename_in + '.asc' to NetCDF format (.dem)
2373    mimicking the ASCII format closely.
2374
2375
2376    An accompanying file with same basename_in but extension .prj must exist
2377    and is used to fix the UTM zone, datum, false northings and eastings.
2378
2379    The prj format is assumed to be as
2380
2381    Projection    UTM
2382    Zone          56
2383    Datum         WGS84
2384    Zunits        NO
2385    Units         METERS
2386    Spheroid      WGS84
2387    Xshift        0.0000000000
2388    Yshift        10000000.0000000000
2389    Parameters
2390    """
2391
2392
2393
2394    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2395
2396    if use_cache is True:
2397        from caching import cache
2398        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2399                       dependencies = [basename_in + '.asc'],
2400                       verbose = verbose)
2401
2402    else:
2403        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2404
2405    return result
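

def _example_convert_dem_from_ascii2netcdf():
    #Usage sketch (not called anywhere): 'topo' is a hypothetical basename;
    #requires topo.asc and topo.prj. With use_cache=True the conversion is
    #only redone when topo.asc changes (see anuga.caching).
    convert_dem_from_ascii2netcdf('topo', use_cache=True, verbose=True)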
2406
2407
2408
2409
2410
2411
2412def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2413                                  verbose = False):
2414    """Read Digitial Elevation model from the following ASCII format (.asc)
2415
2416    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2417    """
2418
2419    import os
2420    from Scientific.IO.NetCDF import NetCDFFile
2421    from Numeric import Float, array
2422
2423    #root, ext = os.path.splitext(basename_in)
2424    root = basename_in
2425
2426    ###########################################
2427    # Read Meta data
    if verbose: print 'Reading METADATA from %s' %(root + '.prj')
2429    metadatafile = open(root + '.prj')
2430    metalines = metadatafile.readlines()
2431    metadatafile.close()
2432
2433    L = metalines[0].strip().split()
2434    assert L[0].strip().lower() == 'projection'
2435    projection = L[1].strip()                   #TEXT
2436
2437    L = metalines[1].strip().split()
2438    assert L[0].strip().lower() == 'zone'
2439    zone = int(L[1].strip())
2440
2441    L = metalines[2].strip().split()
2442    assert L[0].strip().lower() == 'datum'
2443    datum = L[1].strip()                        #TEXT
2444
2445    L = metalines[3].strip().split()
2446    assert L[0].strip().lower() == 'zunits'     #IGNORE
2447    zunits = L[1].strip()                       #TEXT
2448
2449    L = metalines[4].strip().split()
2450    assert L[0].strip().lower() == 'units'
2451    units = L[1].strip()                        #TEXT
2452
2453    L = metalines[5].strip().split()
2454    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2455    spheroid = L[1].strip()                     #TEXT
2456
2457    L = metalines[6].strip().split()
2458    assert L[0].strip().lower() == 'xshift'
2459    false_easting = float(L[1].strip())
2460
2461    L = metalines[7].strip().split()
2462    assert L[0].strip().lower() == 'yshift'
2463    false_northing = float(L[1].strip())
2464
2465    #print false_easting, false_northing, zone, datum
2466
2467
2468    ###########################################
2469    #Read DEM data
2470
2471    datafile = open(basename_in + '.asc')
2472
2473    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2474    lines = datafile.readlines()
2475    datafile.close()
2476
2477    if verbose: print 'Got', len(lines), ' lines'
2478
2479    ncols = int(lines[0].split()[1].strip())
2480    nrows = int(lines[1].split()[1].strip())
2481    xllcorner = float(lines[2].split()[1].strip())
2482    yllcorner = float(lines[3].split()[1].strip())
2483    cellsize = float(lines[4].split()[1].strip())
2484    NODATA_value = int(lines[5].split()[1].strip())
2485
2486    assert len(lines) == nrows + 6
2487
2488
2489    ##########################################
2490
2491
    if basename_out is None:
2493        netcdfname = root + '.dem'
2494    else:
2495        netcdfname = basename_out + '.dem'
2496
2497    if verbose: print 'Store to NetCDF file %s' %netcdfname
2498    # NetCDF file definition
2499    fid = NetCDFFile(netcdfname, 'w')
2500
2501    #Create new file
2502    fid.institution = 'Geoscience Australia'
2503    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2504                      'of spatial point data'
2505
2506    fid.ncols = ncols
2507    fid.nrows = nrows
2508    fid.xllcorner = xllcorner
2509    fid.yllcorner = yllcorner
2510    fid.cellsize = cellsize
2511    fid.NODATA_value = NODATA_value
2512
2513    fid.zone = zone
2514    fid.false_easting = false_easting
2515    fid.false_northing = false_northing
2516    fid.projection = projection
2517    fid.datum = datum
2518    fid.units = units
2519
2520
2521    # dimension definitions
2522    fid.createDimension('number_of_rows', nrows)
2523    fid.createDimension('number_of_columns', ncols)
2524
2525    # variable definitions
2526    fid.createVariable('elevation', Float, ('number_of_rows',
2527                                            'number_of_columns'))
2528
2529    # Get handles to the variables
2530    elevation = fid.variables['elevation']
2531
2532    #Store data
2533    n = len(lines[6:])
2534    for i, line in enumerate(lines[6:]):
2535        fields = line.split()
2536        if verbose and i%((n+10)/10)==0:
2537            print 'Processing row %d of %d' %(i, nrows)
2538
2539        elevation[i, :] = array([float(x) for x in fields])
2540
2541    fid.close()
2542
2543
2544
2545
2546
2547def ferret2sww(basename_in, basename_out = None,
2548               verbose = False,
2549               minlat = None, maxlat = None,
2550               minlon = None, maxlon = None,
2551               mint = None, maxt = None, mean_stage = 0,
2552               origin = None, zscale = 1,
2553               fail_on_NaN = True,
2554               NaN_filler = 0,
2555               elevation = None,
2556               inverted_bathymetry = True
2557               ): #FIXME: Bathymetry should be obtained
2558                                  #from MOST somehow.
2559                                  #Alternatively from elsewhere
2560                                  #or, as a last resort,
2561                                  #specified here.
2562                                  #The value of -100 will work
2563                                  #for the Wollongong tsunami
2564                                  #scenario but is very hacky
2565    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2566    sww format native to abstract_2d_finite_volumes.
2567
    Specify only basename_in and read files of the form
    basename_in + '_ha.nc', '_ua.nc', '_va.nc' and '_e.nc' containing
    relative height, x-velocity, y-velocity and elevation, respectively.
2571
2572    Also convert latitude and longitude to UTM. All coordinates are
2573    assumed to be given in the GDA94 datum.
2574
    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees
2578
2579    origin is a 3-tuple with geo referenced
2580    UTM coordinates (zone, easting, northing)
2581
2582    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2583    which means that longitude is the fastest
2584    varying dimension (row major order, so to speak)
2585
2586    ferret2sww uses grid points as vertices in a triangular grid
2587    counting vertices from lower left corner upwards, then right
2588    """
2589
2590    import os
2591    from Scientific.IO.NetCDF import NetCDFFile
2592    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2593    from Numeric import allclose, around
2594
2595    precision = Float
2596
2597    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2598
    if minlat is not None:
        assert -90 < minlat < 90, msg
    if maxlat is not None:
        assert -90 < maxlat < 90, msg
    if minlon is not None:
        assert -180 < minlon < 180, msg
    if maxlon is not None:
        assert -180 < maxlon < 180, msg
2607
2608
2609    #Get NetCDF data
2610    if verbose: print 'Reading files %s_*.nc' %basename_in
2611    #print "basename_in + '_ha.nc'",basename_in + '_ha.nc'
2612    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2613    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2614    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2615    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2616
2617    if basename_out is None:
2618        swwname = basename_in + '.sww'
2619    else:
2620        swwname = basename_out + '.sww'
2621
    # Get dimensions of file_h
2623    for dimension in file_h.dimensions.keys():
2624        if dimension[:3] == 'LON':
2625            dim_h_longitude = dimension
2626        if dimension[:3] == 'LAT':
2627            dim_h_latitude = dimension
2628        if dimension[:4] == 'TIME':
2629            dim_h_time = dimension
2630
2634
2635    times = file_h.variables[dim_h_time]
2636    latitudes = file_h.variables[dim_h_latitude]
2637    longitudes = file_h.variables[dim_h_longitude]
2638   
    # Get dimensions of file_e
2640    for dimension in file_e.dimensions.keys():
2641        if dimension[:3] == 'LON':
2642            dim_e_longitude = dimension
2643        if dimension[:3] == 'LAT':
2644            dim_e_latitude = dimension
2645
    # Get dimensions of file_u
2647    for dimension in file_u.dimensions.keys():
2648        if dimension[:3] == 'LON':
2649            dim_u_longitude = dimension
2650        if dimension[:3] == 'LAT':
2651            dim_u_latitude = dimension
2652        if dimension[:4] == 'TIME':
2653            dim_u_time = dimension
2654           
    # Get dimensions of file_v
2656    for dimension in file_v.dimensions.keys():
2657        if dimension[:3] == 'LON':
2658            dim_v_longitude = dimension
2659        if dimension[:3] == 'LAT':
2660            dim_v_latitude = dimension
2661        if dimension[:4] == 'TIME':
2662            dim_v_time = dimension
2663
2664
2665    #Precision used by most for lat/lon is 4 or 5 decimals
2666    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2667    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2668
2669    #Check that files are compatible
2670    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2671    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2672    assert allclose(latitudes, e_lat)
2673
2674    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2675    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2676    assert allclose(longitudes, e_lon)
2677
    if mint is None:
        jmin = 0
    else:
        jmin = searchsorted(times, mint)

    if maxt is None:
        jmax = len(times)
    else:
        jmax = searchsorted(times, maxt)
2687
    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
                                                  longitudes[:],
                                                  minlat, maxlat,
                                                  minlon, maxlon)
2692
2693
2695    times = times[jmin:jmax]
2696    latitudes = latitudes[kmin:kmax]
2697    longitudes = longitudes[lmin:lmax]
2698
2699
2700    if verbose: print 'cropping'
2701    zname = 'ELEVATION'
2702
2703    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2704    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2705    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2706    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2707
2721
2722
2723
2724    #Get missing values
2725    nan_ha = file_h.variables['HA'].missing_value[0]
2726    nan_ua = file_u.variables['UA'].missing_value[0]
2727    nan_va = file_v.variables['VA'].missing_value[0]
2728    if hasattr(file_e.variables[zname],'missing_value'):
2729        nan_e  = file_e.variables[zname].missing_value[0]
2730    else:
2731        nan_e = None
2732
2733    #Cleanup
2734    from Numeric import sometrue
2735
2736    missing = (amplitudes == nan_ha)
2737    if sometrue (missing):
2738        if fail_on_NaN:
2739            msg = 'NetCDFFile %s contains missing values'\
2740                  %(basename_in+'_ha.nc')
2741            raise DataMissingValuesError, msg
2742        else:
2743            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2744
2745    missing = (uspeed == nan_ua)
2746    if sometrue (missing):
2747        if fail_on_NaN:
2748            msg = 'NetCDFFile %s contains missing values'\
2749                  %(basename_in+'_ua.nc')
2750            raise DataMissingValuesError, msg
2751        else:
2752            uspeed = uspeed*(missing==0) + missing*NaN_filler
2753
2754    missing = (vspeed == nan_va)
2755    if sometrue (missing):
2756        if fail_on_NaN:
2757            msg = 'NetCDFFile %s contains missing values'\
2758                  %(basename_in+'_va.nc')
2759            raise DataMissingValuesError, msg
2760        else:
2761            vspeed = vspeed*(missing==0) + missing*NaN_filler
2762
2763
2764    missing = (elevations == nan_e)
2765    if sometrue (missing):
2766        if fail_on_NaN:
2767            msg = 'NetCDFFile %s contains missing values'\
2768                  %(basename_in+'_e.nc')
2769            raise DataMissingValuesError, msg
2770        else:
2771            elevations = elevations*(missing==0) + missing*NaN_filler
2772
2773    #######
2774
2775
2776
2777    number_of_times = times.shape[0]
2778    number_of_latitudes = latitudes.shape[0]
2779    number_of_longitudes = longitudes.shape[0]
2780
2781    assert amplitudes.shape[0] == number_of_times
2782    assert amplitudes.shape[1] == number_of_latitudes
2783    assert amplitudes.shape[2] == number_of_longitudes
2784
2785    if verbose:
2786        print '------------------------------------------------'
2787        print 'Statistics:'
2788        print '  Extent (lat/lon):'
2789        print '    lat in [%f, %f], len(lat) == %d'\
2790              %(min(latitudes.flat), max(latitudes.flat),
2791                len(latitudes.flat))
2792        print '    lon in [%f, %f], len(lon) == %d'\
2793              %(min(longitudes.flat), max(longitudes.flat),
2794                len(longitudes.flat))
2795        print '    t in [%f, %f], len(t) == %d'\
2796              %(min(times.flat), max(times.flat), len(times.flat))
2797
        q = amplitudes.flat
        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = uspeed.flat
        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = vspeed.flat
        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = elevations.flat
        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2813
2814
2815    #print number_of_latitudes, number_of_longitudes
2816    number_of_points = number_of_latitudes*number_of_longitudes
2817    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2818
2819
2820    file_h.close()
2821    file_u.close()
2822    file_v.close()
2823    file_e.close()
2824
2825
2826    # NetCDF file definition
2827    outfile = NetCDFFile(swwname, 'w')
2828
2829    #Create new file
2830    outfile.institution = 'Geoscience Australia'
2831    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2832                          %(basename_in + '_ha.nc',
2833                            basename_in + '_ua.nc',
2834                            basename_in + '_va.nc',
2835                            basename_in + '_e.nc')
2836
2837
2838    #For sww compatibility
2839    outfile.smoothing = 'Yes'
2840    outfile.order = 1
2841
2842    #Start time in seconds since the epoch (midnight 1/1/1970)
2843    outfile.starttime = starttime = times[0]
2844    times = times - starttime  #Store relative times
2845
2846    # dimension definitions
2847    outfile.createDimension('number_of_volumes', number_of_volumes)
2848
2849    outfile.createDimension('number_of_vertices', 3)
2850    outfile.createDimension('number_of_points', number_of_points)
2851
2852
    outfile.createDimension('number_of_timesteps', len(times))
2855
2856    # variable definitions
2857    outfile.createVariable('x', precision, ('number_of_points',))
2858    outfile.createVariable('y', precision, ('number_of_points',))
2859    outfile.createVariable('elevation', precision, ('number_of_points',))
2860
2861    #FIXME: Backwards compatibility
2862    outfile.createVariable('z', precision, ('number_of_points',))
2863    #################################
2864
2865    outfile.createVariable('volumes', Int, ('number_of_volumes',
2866                                            'number_of_vertices'))
2867
2868    outfile.createVariable('time', precision,
2869                           ('number_of_timesteps',))
2870
2871    outfile.createVariable('stage', precision,
2872                           ('number_of_timesteps',
2873                            'number_of_points'))
2874
2875    outfile.createVariable('xmomentum', precision,
2876                           ('number_of_timesteps',
2877                            'number_of_points'))
2878
2879    outfile.createVariable('ymomentum', precision,
2880                           ('number_of_timesteps',
2881                            'number_of_points'))
2882
2883
2884    #Store
2885    from anuga.coordinate_transforms.redfearn import redfearn
2886    x = zeros(number_of_points, Float)  #Easting
2887    y = zeros(number_of_points, Float)  #Northing
2888
2889
2890    if verbose: print 'Making triangular grid'
2891    #Check zone boundaries
2892    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2893
2894    vertices = {}
2895    i = 0
2896    for k, lat in enumerate(latitudes):       #Y direction
2897        for l, lon in enumerate(longitudes):  #X direction
2898
2899            vertices[l,k] = i
2900
2901            zone, easting, northing = redfearn(lat,lon)
2902
            msg = 'Zone boundary crossed at longitude = %f' %lon
2904            #assert zone == refzone, msg
2905            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2906            x[i] = easting
2907            y[i] = northing
2908            i += 1
2909
2910
2911    #Construct 2 triangles per 'rectangular' element
2912    volumes = []
2913    for l in range(number_of_longitudes-1):    #X direction
2914        for k in range(number_of_latitudes-1): #Y direction
2915            v1 = vertices[l,k+1]
2916            v2 = vertices[l,k]
2917            v3 = vertices[l+1,k+1]
2918            v4 = vertices[l+1,k]
2919
2920            volumes.append([v1,v2,v3]) #Upper element
2921            volumes.append([v4,v3,v2]) #Lower element
2922
2923    volumes = array(volumes)
2924
    if origin is None:
2926        zone = refzone
2927        xllcorner = min(x)
2928        yllcorner = min(y)
2929    else:
2930        zone = origin[0]
2931        xllcorner = origin[1]
2932        yllcorner = origin[2]
2933
2934
2935    outfile.xllcorner = xllcorner
2936    outfile.yllcorner = yllcorner
2937    outfile.zone = zone
2938
2939
2940    if elevation is not None:
2941        z = elevation
2942    else:
2943        if inverted_bathymetry:
2944            z = -1*elevations
2945        else:
2946            z = elevations
2947    #FIXME: z should be obtained from MOST and passed in here
2948
2949    from Numeric import resize
2950    z = resize(z,outfile.variables['z'][:].shape)
2951    outfile.variables['x'][:] = x - xllcorner
2952    outfile.variables['y'][:] = y - yllcorner
    outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
2954    outfile.variables['elevation'][:] = z
2955    outfile.variables['time'][:] = times   #Store time relative
2956    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
2957
2958
2959
2960    #Time stepping
2961    stage = outfile.variables['stage']
2962    xmomentum = outfile.variables['xmomentum']
2963    ymomentum = outfile.variables['ymomentum']
2964
2965    if verbose: print 'Converting quantities'
2966    n = len(times)
2967    for j in range(n):
2968        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2969        i = 0
2970        for k in range(number_of_latitudes):      #Y direction
2971            for l in range(number_of_longitudes): #X direction
2972                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2973                stage[j,i] = w
2974                h = w - z[i]
2975                xmomentum[j,i] = uspeed[j,k,l]/100*h
2976                ymomentum[j,i] = vspeed[j,k,l]/100*h
2977                i += 1
2978
2979    #outfile.close()
2980
2981    #FIXME: Refactor using code from file_function.statistics
2982    #Something like print swwstats(swwname)
2983    if verbose:
2984        x = outfile.variables['x'][:]
2985        y = outfile.variables['y'][:]
2986        times = outfile.variables['time'][:]
2987        print '------------------------------------------------'
2988        print 'Statistics of output file:'
2989        print '  Name: %s' %swwname
2990        print '  Reference:'
2991        print '    Lower left corner: [%f, %f]'\
2992              %(xllcorner, yllcorner)
2993        print '    Start time: %f' %starttime
2994        print '  Extent:'
2995        print '    x [m] in [%f, %f], len(x) == %d'\
2996              %(min(x.flat), max(x.flat), len(x.flat))
2997        print '    y [m] in [%f, %f], len(y) == %d'\
2998              %(min(y.flat), max(y.flat), len(y.flat))
2999        print '    t [s] in [%f, %f], len(t) == %d'\
3000              %(min(times), max(times), len(times))
3001        print '  Quantities [SI units]:'
3002        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
3003            q = outfile.variables[name][:].flat
3004            print '    %s in [%f, %f]' %(name, min(q), max(q))
3005
3006
3007
3008    outfile.close()
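

def _example_ferret2sww():
    #Usage sketch (not called anywhere): 'most_output' and the lat/lon
    #window are hypothetical. Expects most_output_ha.nc, _ua.nc, _va.nc
    #and _e.nc; writes most_output.sww.
    ferret2sww('most_output',
               minlat=-21.0, maxlat=-20.0,
               minlon=114.0, maxlon=115.0,
               mean_stage=0,
               zscale=1,
               fail_on_NaN=False,
               NaN_filler=0,
               inverted_bathymetry=True)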
3009
3010
3011
3012
3013
3014def timefile2netcdf(filename, quantity_names = None):
3015    """Template for converting typical text files with time series to
3016    NetCDF tms file.
3017
3018
    The file format is assumed to be two fields separated by a comma:
3020
3021        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3022
3023    E.g
3024
3025      31/08/04 00:00:00, 1.328223 0 0
3026      31/08/04 00:15:00, 1.292912 0 0
3027
3028    will provide a time dependent function f(t) with three attributes
3029
    filename is assumed to be the rootname with extensions .txt and .tms
3031    """
3032
3033    import time, calendar
3034    from Numeric import array
3035    from anuga.config import time_format
3036    from anuga.utilities.numerical_tools import ensure_numeric
3037
3038
3039
3040    fid = open(filename + '.txt')
3041    line = fid.readline()
3042    fid.close()
3043
3044    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3046    assert len(fields) == 2, msg
3047
3048    try:
3049        starttime = calendar.timegm(time.strptime(fields[0], time_format))
3050    except ValueError:
3051        msg = 'First field in file %s must be' %filename
3052        msg += ' date-time with format %s.\n' %time_format
3053        msg += 'I got %s instead.' %fields[0]
3054        raise DataTimeError, msg
3055
3056
3057    #Split values
3058    values = []
3059    for value in fields[1].split():
3060        values.append(float(value))
3061
3062    q = ensure_numeric(values)
3063
3064    msg = 'ERROR: File must contain at least one independent value'
3065    assert len(q.shape) == 1, msg
3066
3067
3068
3069    #Read times proper
3070    from Numeric import zeros, Float, alltrue
3071    from anuga.config import time_format
3072    import time, calendar
3073
3074    fid = open(filename + '.txt')
3075    lines = fid.readlines()
3076    fid.close()
3077
3078    N = len(lines)
3079    d = len(q)
3080
3081    T = zeros(N, Float)       #Time
3082    Q = zeros((N, d), Float)  #Values
3083
3084    for i, line in enumerate(lines):
3085        fields = line.split(',')
3086        realtime = calendar.timegm(time.strptime(fields[0], time_format))
3087
3088        T[i] = realtime - starttime
3089
3090        for j, value in enumerate(fields[1].split()):
3091            Q[i, j] = float(value)
3092
    msg = 'File %s must list time as a monotonically ' %filename
3094    msg += 'increasing sequence'
3095    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3096
3097
3098    #Create NetCDF file
3099    from Scientific.IO.NetCDF import NetCDFFile
3100
3101    fid = NetCDFFile(filename + '.tms', 'w')
3102
3103
3104    fid.institution = 'Geoscience Australia'
3105    fid.description = 'Time series'
3106
3107
3108    #Reference point
3109    #Start time in seconds since the epoch (midnight 1/1/1970)
3110    #FIXME: Use Georef
3111    fid.starttime = starttime
3112
3113
3114    # dimension definitions
3115    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3116    #fid.createDimension('number_of_vertices', 3)
3117
3118
3119    fid.createDimension('number_of_timesteps', len(T))
3120
3121    fid.createVariable('time', Float, ('number_of_timesteps',))
3122
3123    fid.variables['time'][:] = T
3124
3125    for i in range(Q.shape[1]):
        if quantity_names is not None and i < len(quantity_names):
            name = quantity_names[i]
        else:
            name = 'Attribute%d' %i
3130
3131        fid.createVariable(name, Float, ('number_of_timesteps',))
3132        fid.variables[name][:] = Q[:,i]
3133
3134    fid.close()
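

def _example_timefile2netcdf():
    #Usage sketch (not called anywhere): write a tiny two-line series in
    #the format shown in the docstring (assuming anuga.config.time_format
    #matches the DD/MM/YY hh:mm:ss example), then convert gauge.txt to
    #gauge.tms. Filename and attribute names are hypothetical.
    fid = open('gauge.txt', 'w')
    fid.write('31/08/04 00:00:00, 1.328223 0 0\n')
    fid.write('31/08/04 00:15:00, 1.292912 0 0\n')
    fid.close()
    timefile2netcdf('gauge', quantity_names=['stage', 'xmom', 'ymom'])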
3135
3136
3137def extent_sww(file_name):
3138    """
3139    Read in an sww file.
3140
    Input:
3142    file_name - the sww file
3143
    Output:
3145    z - Vector of bed elevation
3146    volumes - Array.  Each row has 3 values, representing
3147    the vertices that define the volume
3148    time - Vector of the times where there is stage information
3149    stage - array with respect to time and vertices (x,y)
3150    """
3151
3152
3153    from Scientific.IO.NetCDF import NetCDFFile
3154
3155    #Check contents
3156    #Get NetCDF
3157    fid = NetCDFFile(file_name, 'r')
3158
3159    # Get the variables
3160    x = fid.variables['x'][:]
3161    y = fid.variables['y'][:]
3162    stage = fid.variables['stage'][:]
3163    #print "stage",stage
3164    #print "stage.shap",stage.shape
3165    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3166    #print "min(stage)",min(stage)
3167
3168    fid.close()
3169
3170    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
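

def _example_extent_sww():
    #Usage sketch (not called anywhere): 'bedslope.sww' is hypothetical.
    xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('bedslope.sww')
    print 'x in [%f, %f], y in [%f, %f]' %(xmin, xmax, ymin, ymax)
    print 'stage in [%f, %f]' %(stage_min, stage_max)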
3171
3172
def sww2domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
3176    """
3177    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3178
3179    Boundary is not recommended if domain.smooth is not selected, as it
3180    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
3182    give a different final boundary, or crash.
3183    """
    NaN = 9.969209968386869e+036   #Initialise NaN (default NetCDF fill value)
3186
3187    from Scientific.IO.NetCDF import NetCDFFile
3188    from shallow_water import Domain
3189    from Numeric import asarray, transpose, resize
3190
3191    if verbose: print 'Reading from ', filename
3192    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3193    time = fid.variables['time']       #Timesteps
3194    if t is None:
3195        t = time[-1]
3196    time_interp = get_time_interp(time,t)
3197
3198    # Get the variables as Numeric arrays
3199    x = fid.variables['x'][:]             #x-coordinates of vertices
3200    y = fid.variables['y'][:]             #y-coordinates of vertices
3201    elevation = fid.variables['elevation']     #Elevation
3202    stage = fid.variables['stage']     #Water level
3203    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3204    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3205
3206    starttime = fid.starttime[0]
3207    volumes = fid.variables['volumes'][:] #Connectivity
3208    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3209
3210    conserved_quantities = []
3211    interpolated_quantities = {}
3212    other_quantities = []
3213
3214    # get geo_reference
3215    #sww files don't have to have a geo_ref
3216    try:
3217        geo_reference = Geo_reference(NetCDFObject=fid)
    except AttributeError:
3219        geo_reference = None
3220
3221    if verbose: print '    getting quantities'
3222    for quantity in fid.variables.keys():
3223        dimensions = fid.variables[quantity].dimensions
3224        if 'number_of_timesteps' in dimensions:
3225            conserved_quantities.append(quantity)
3226            interpolated_quantities[quantity]=\
3227                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3228        else: other_quantities.append(quantity)
3229
3230    other_quantities.remove('x')
3231    other_quantities.remove('y')
3232    other_quantities.remove('z')
3233    other_quantities.remove('volumes')
3234
3235    conserved_quantities.remove('time')
3236
3237    if verbose: print '    building domain'
3238    #    From domain.Domain:
3239    #    domain = Domain(coordinates, volumes,\
3240    #                    conserved_quantities = conserved_quantities,\
3241    #                    other_quantities = other_quantities,zone=zone,\
3242    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3243
3244    #   From shallow_water.Domain:
3245    coordinates=coordinates.tolist()
3246    volumes=volumes.tolist()
3247    #FIXME:should this be in mesh?(peter row)
3248    if fid.smoothing == 'Yes': unique = False
3249    else: unique = True
3250    if unique:
3251        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3252
3253
3254    try:
3255        domain = Domain(coordinates, volumes, boundary)
3256    except AssertionError, e:
3257        fid.close()
3258        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3259        raise DataDomainError, msg
3260
    if boundary is not None:
3262        domain.boundary = boundary
3263
3264    domain.geo_reference = geo_reference
3265
3266    domain.starttime=float(starttime)+float(t)
3267    domain.time=0.0
3268
3269    for quantity in other_quantities:
3270        try:
3271            NaN = fid.variables[quantity].missing_value
3272        except:
3273            pass #quantity has no missing_value number
3274        X = fid.variables[quantity][:]
3275        if very_verbose:
3276            print '       ',quantity
3277            print '        NaN =',NaN
3278            print '        max(X)'
3279            print '       ',max(X)
3280            print '        max(X)==NaN'
3281            print '       ',max(X)==NaN
3282            print ''
3283        if (max(X)==NaN) or (min(X)==NaN):
3284            if fail_if_NaN:
3285                msg = 'quantity "%s" contains no_data entry'%quantity
3286                raise DataMissingValuesError, msg
3287            else:
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
3290        if unique:
3291            X = resize(X,(len(X)/3,3))
3292        domain.set_quantity(quantity,X)
3293    #
3294    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except AttributeError:
            NaN = None #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if NaN is not None and ((max(X)==NaN) or (min(X)==NaN)):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise DataMissingValuesError, msg
            else:
                #Replace missing values with NaN_filler
                data = (X!=NaN)
                X = (X*data)+(data==0)*NaN_filler
3315        if unique:
3316            X = resize(X,(X.shape[0]/3,3))
3317        domain.set_quantity(quantity,X)
3318
3319    fid.close()
3320    return domain
3321
def interpolated_quantity(saved_quantity,time_interp):
    """Interpolate a stored quantity with respect to time, given an
    (index, ratio) pair as returned by get_time_interp.
    """
    index,ratio = time_interp
    Q = saved_quantity
3327    if ratio > 0:
3328        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3329    else:
3330        q = Q[index]
3331    #Return vector of interpolated values
3332    return q
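
# Illustrative example (values are hypothetical): with a quantity saved at
# three timesteps and time_interp=(1, 0.5), as returned by get_time_interp
# below, the result is the average of timesteps 1 and 2:
#
#     >>> from Numeric import array
#     >>> Q = array([[0., 0.], [10., 20.], [30., 40.]])
#     >>> q = interpolated_quantity(Q, (1, 0.5))   # 0.5*Q[1] + 0.5*Q[2]
#
# q is then [20., 30.]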
3333
3334def get_time_interp(time,t=None):
3335    #Finds the ratio and index for time interpolation.
3336    #It is borrowed from previous abstract_2d_finite_volumes code.
3337    if t is None:
3338        t=time[-1]
3339        index = -1
3340        ratio = 0.
3341    else:
3342        T = time
3343        tau = t
3344        index=0
3345        msg = 'Time interval derived from file %s [%s:%s]'\
3346            %('FIXMEfilename', T[0], T[-1])
3347        msg += ' does not match model time: %s' %tau
3348        if tau < time[0]: raise DataTimeError, msg
3349        if tau > time[-1]: raise DataTimeError, msg
3350        while tau > time[index]: index += 1
3351        while tau < time[index]: index -= 1
3352        if tau == time[index]:
3353            #Protect against case where tau == time[-1] (last time)
3354            # - also works in general when tau == time[i]
3355            ratio = 0
3356        else:
3357            #t is now between index and index+1
3358            ratio = (tau - time[index])/(time[index+1] - time[index])
3359    return (index,ratio)
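
# Illustrative example: for model time t=90 within a stored time vector
# [0, 60, 120], the returned pair means 'interpolate halfway between
# timesteps 1 and 2':
#
#     >>> get_time_interp([0.0, 60.0, 120.0], t=90.0)
#     (1, 0.5)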
3360
3361
3362def weed(coordinates,volumes,boundary = None):
    #Note: type(x) is a type object and never equals the string 'array',
    #so compare type names instead
    if type(coordinates).__name__=='array':
        coordinates = coordinates.tolist()
    if type(volumes).__name__=='array':
        volumes = volumes.tolist()
3367
3368    unique = False
3369    point_dict = {}
3370    same_point = {}
3371    for i in range(len(coordinates)):
3372        point = tuple(coordinates[i])
3373        if point_dict.has_key(point):
3374            unique = True
3375            same_point[i]=point
3376            #to change all point i references to point j
3377        else:
3378            point_dict[point]=i
3379            same_point[i]=point
3380
3381    coordinates = []
3382    i = 0
3383    for point in point_dict.keys():
3384        point = tuple(point)
3385        coordinates.append(list(point))
3386        point_dict[point]=i
3387        i+=1
3388
3389
3390    for volume in volumes:
3391        for i in range(len(volume)):
3392            index = volume[i]
3393            if index>-1:
3394                volume[i]=point_dict[same_point[index]]
3395
3396    new_boundary = {}
3397    if not boundary is None:
3398        for segment in boundary.keys():
3399            point0 = point_dict[same_point[segment[0]]]
3400            point1 = point_dict[same_point[segment[1]]]
3401            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')? (Peter Row)
3404
            if new_boundary.has_key((point0,point1)):
                pass #Keep existing label (concatenation disabled, see FIXME)
            elif new_boundary.has_key((point1,point0)):
                pass #Keep existing label (concatenation disabled, see FIXME)
            else: new_boundary[(point0,point1)]=label
3413
3414        boundary = new_boundary
3415
3416    return coordinates,volumes,boundary
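
# Illustrative example (hypothetical data): the first and last of four
# vertices coincide, so they are merged and both triangles' vertex indices
# are remapped.  The order of the returned coordinate list follows
# dictionary ordering, so only the topology is guaranteed:
#
#     >>> coords, vols, bnd = weed([[0.,0.], [1.,0.], [0.,1.], [0.,0.]],
#     ...                          [[0, 1, 2], [3, 1, 2]])
#     >>> len(coords)
#     3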
3417
3418
3419def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3420                 verbose=False):
3421    """Read Digitial Elevation model from the following NetCDF format (.dem)
3422
3423    Example:
3424
3425    ncols         3121
3426    nrows         1800
3427    xllcorner     722000
3428    yllcorner     5893000
3429    cellsize      25
3430    NODATA_value  -9999
3431    138.3698 137.4194 136.5062 135.5558 ..........
3432
3433    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3434    """
3435
3436    import os
3437    from Scientific.IO.NetCDF import NetCDFFile
3438    from Numeric import Float, zeros, sum, reshape, equal
3439
3440    root = basename_in
3441    inname = root + '.dem'
3442
3443    #Open existing netcdf file to read
3444    infile = NetCDFFile(inname, 'r')
3445    if verbose: print 'Reading DEM from %s' %inname
3446
3447    #Read metadata
3448    ncols = infile.ncols[0]
3449    nrows = infile.nrows[0]
3450    xllcorner = infile.xllcorner[0]
3451    yllcorner = infile.yllcorner[0]
3452    cellsize = infile.cellsize[0]
3453    NODATA_value = infile.NODATA_value[0]
3454    zone = infile.zone[0]
3455    false_easting = infile.false_easting[0]
3456    false_northing = infile.false_northing[0]
3457    projection = infile.projection
3458    datum = infile.datum
3459    units = infile.units
3460
3461    dem_elevation = infile.variables['elevation']
3462
3463    #Get output file name
3464    if basename_out == None:
3465        outname = root + '_' + repr(cellsize_new) + '.dem'
3466    else:
3467        outname = basename_out + '.dem'
3468
3469    if verbose: print 'Write decimated NetCDF file to %s' %outname
3470
3471    #Determine some dimensions for decimated grid
3472    (nrows_stencil, ncols_stencil) = stencil.shape
3473    x_offset = ncols_stencil / 2
3474    y_offset = nrows_stencil / 2
3475    cellsize_ratio = int(cellsize_new / cellsize)
3476    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3477    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3478
3479    #Open netcdf file for output
3480    outfile = NetCDFFile(outname, 'w')
3481
3482    #Create new file
3483    outfile.institution = 'Geoscience Australia'
3484    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3485                           'of spatial point data'
3486    #Georeferencing
3487    outfile.zone = zone
3488    outfile.projection = projection
3489    outfile.datum = datum
3490    outfile.units = units
3491
3492    outfile.cellsize = cellsize_new
3493    outfile.NODATA_value = NODATA_value
3494    outfile.false_easting = false_easting
3495    outfile.false_northing = false_northing
3496
3497    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3498    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3499    outfile.ncols = ncols_new
3500    outfile.nrows = nrows_new
3501
3502    # dimension definition
3503    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3504
3505    # variable definition
3506    outfile.createVariable('elevation', Float, ('number_of_points',))
3507
3508    # Get handle to the variable
3509    elevation = outfile.variables['elevation']
3510
3511    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3512
3513    #Store data
3514    global_index = 0
3515    for i in range(nrows_new):
3516        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3517        lower_index = global_index
3518        telev =  zeros(ncols_new, Float)
3519        local_index = 0
3520        trow = i * cellsize_ratio
3521
3522        for j in range(ncols_new):
3523            tcol = j * cellsize_ratio
3524            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3525
3526            #if dem contains 1 or more NODATA_values set value in
3527            #decimated dem to NODATA_value, else compute decimated
3528            #value using stencil
3529            if sum(sum(equal(tmp, NODATA_value))) > 0:
3530                telev[local_index] = NODATA_value
3531            else:
3532                telev[local_index] = sum(sum(tmp * stencil))
3533
3534            global_index += 1
3535            local_index += 1
3536
3537        upper_index = global_index
3538
3539        elevation[lower_index:upper_index] = telev
3540
3541    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3542
3543    infile.close()
3544    outfile.close()
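
# Illustrative usage (filenames are hypothetical, and the input DEM is
# assumed to have 25m cells).  A normalised 3x3 averaging stencil decimates
# the DEM to 75m cells; the stencil weights should sum to 1 so decimated
# cells remain averages of the input:
#
#     >>> from Numeric import ones, Float
#     >>> stencil = ones((3, 3), Float) / 9.0
#     >>> decimate_dem('dem_25m', stencil, 75, basename_out='dem_75m')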
3545
3546
3547
3548
3549def tsh2sww(filename, verbose=False): #test_tsh2sww
3550    """
    Convert a tsh/msh mesh file to sww format, e.g. to check
    whether the mesh 'looks' good.
3552    """
3553
3554    from shallow_water import Domain
3555    from anuga.abstract_2d_finite_volumes.pmesh2domain import pmesh_to_domain_instance
3556    import time, os
3557    #from data_manager import get_dataobject
3558    from os import sep, path
3559    from anuga.utilities.numerical_tools import mean
3560
3561    if verbose == True:print 'Creating domain from', filename
3562    domain = pmesh_to_domain_instance(filename, Domain)
3563    if verbose == True:print "Number of triangles = ", len(domain)
3564
3565    domain.smooth = True
3566    domain.format = 'sww'   #Native netcdf visualisation format
3567    file_path, filename = path.split(filename)
3568    filename, ext = path.splitext(filename)
3569    domain.set_name(filename)   
3570    domain.reduction = mean
3571    if verbose == True:print "file_path",file_path
3572    if file_path == "":file_path = "."
3573    domain.set_datadir(file_path)
3574
3575    if verbose == True:
3576        print "Output written to " + domain.get_datadir() + sep + \
3577              domain.get_name() + "." + domain.format
3578    sww = get_dataobject(domain)
3579    sww.store_connectivity()
3580    sww.store_timestep('stage')
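
# Illustrative usage (hypothetical filename): writes my_mesh.sww alongside
# the input mesh, storing connectivity and a single 'stage' timestep:
#
#     >>> tsh2sww('my_mesh.tsh', verbose=True)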
3581
3582
3583def asc_csiro2sww(bath_dir,
3584                  elevation_dir,
3585                  ucur_dir,
3586                  vcur_dir,
3587                  sww_file,
3588                  minlat = None, maxlat = None,
3589                  minlon = None, maxlon = None,
3590                  zscale=1,
3591                  mean_stage = 0,
3592                  fail_on_NaN = True,
3593                  elevation_NaN_filler = 0,
3594                  bath_prefix='ba',
3595                  elevation_prefix='el',
3596                  verbose=False):
3597    """
3598    Produce an sww boundary file, from esri ascii data from CSIRO.
3599
3600    Also convert latitude and longitude to UTM. All coordinates are
3601    assumed to be given in the GDA94 datum.
3602
3603    assume:
3604    All files are in esri ascii format
3605
3606    4 types of information
3607    bathymetry
3608    elevation
3609    u velocity
3610    v velocity
3611
3612    Assumptions
3613    The metadata of all the files is the same
    Each type is in a separate directory
    One bath file with extension .000
3616    The time period is less than 24hrs and uniform.
3617    """
3618    from Scientific.IO.NetCDF import NetCDFFile
3619
3620    from anuga.coordinate_transforms.redfearn import redfearn
3621
    precision = Float # So if we want to change the precision it's done here
3623
3624    # go in to the bath dir and load the only file,
3625    bath_files = os.listdir(bath_dir)
3626    #print "bath_files",bath_files
3627
3628    #fixme: make more general?
3629    bath_file = bath_files[0]
3630    bath_dir_file =  bath_dir + os.sep + bath_file
3631    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3632    #print "bath_metadata",bath_metadata
3633    #print "bath_grid",bath_grid
3634
3635    #Use the date.time of the bath file as a basis for
3636    #the start time for other files
3637    base_start = bath_file[-12:]
3638
3639    #go into the elevation dir and load the 000 file
3640    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3641                         + base_start
3642    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
3643    #print "elevation_dir_file",elevation_dir_file
3644    #print "elevation_grid", elevation_grid
3645
3646    elevation_files = os.listdir(elevation_dir)
3647    ucur_files = os.listdir(ucur_dir)
3648    vcur_files = os.listdir(vcur_dir)
3649    elevation_files.sort()
3650    # the first elevation file should be the
3651    # file with the same base name as the bath data
3652    #print "elevation_files",elevation_files
3653    #print "'el' + base_start",'el' + base_start
    assert elevation_files[0] == elevation_prefix + base_start
3655
3656    #assert bath_metadata == elevation_metadata
3657
3658
3659
3660    number_of_latitudes = bath_grid.shape[0]
3661    number_of_longitudes = bath_grid.shape[1]
3662    #number_of_times = len(os.listdir(elevation_dir))
3663    #number_of_points = number_of_latitudes*number_of_longitudes
3664    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3665
3666    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3667                  for x in range(number_of_longitudes)]
3668    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3669                 for y in range(number_of_latitudes)]
3670
    # reverse order of lat, so the first lat represents the first grid row
3672    latitudes.reverse()
3673
3674    #print "latitudes - before _get_min_max_indexes",latitudes
3675    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
3676                                                 minlat=minlat, maxlat=maxlat,
3677                                                 minlon=minlon, maxlon=maxlon)
3678
3679
3680    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3681    #print "bath_grid",bath_grid
3682    latitudes = latitudes[kmin:kmax]
3683    longitudes = longitudes[lmin:lmax]
3684    number_of_latitudes = len(latitudes)
3685    number_of_longitudes = len(longitudes)
3686    number_of_times = len(os.listdir(elevation_dir))
3687    number_of_points = number_of_latitudes*number_of_longitudes
3688    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3689    #print "number_of_points",number_of_points
3690
3691    #Work out the times
3692    if len(elevation_files) > 1:
3693        # Assume: The time period is less than 24hrs.
3694        time_period = (int(elevation_files[1][-3:]) - \
3695                      int(elevation_files[0][-3:]))*60*60
3696        times = [x*time_period for x in range(len(elevation_files))]
3697    else:
3698        times = [0.0]
3699    #print "times", times
3700    #print "number_of_latitudes", number_of_latitudes
3701    #print "number_of_longitudes", number_of_longitudes
3702    #print "number_of_times", number_of_times
3703    #print "latitudes", latitudes
3704    #print "longitudes", longitudes
3705
3706
3707    if verbose:
3708        print '------------------------------------------------'
3709        print 'Statistics:'
3710        print '  Extent (lat/lon):'
3711        print '    lat in [%f, %f], len(lat) == %d'\
3712              %(min(latitudes), max(latitudes),
3713                len(latitudes))
3714        print '    lon in [%f, %f], len(lon) == %d'\
3715              %(min(longitudes), max(longitudes),
3716                len(longitudes))
3717        print '    t in [%f, %f], len(t) == %d'\
3718              %(min(times), max(times), len(times))
3719
3720    ######### WRITE THE SWW FILE #############
3721    # NetCDF file definition
3722    outfile = NetCDFFile(sww_file, 'w')
3723
3724    #Create new file
3725    outfile.institution = 'Geoscience Australia'
3726    outfile.description = 'Converted from XXX'
3727
3728
3729    #For sww compatibility
3730    outfile.smoothing = 'Yes'
3731    outfile.order = 1
3732
3733    #Start time in seconds since the epoch (midnight 1/1/1970)
3734    outfile.starttime = starttime = times[0]
3735
3736
3737    # dimension definitions
3738    outfile.createDimension('number_of_volumes', number_of_volumes)
3739
3740    outfile.createDimension('number_of_vertices', 3)
3741    outfile.createDimension('number_of_points', number_of_points)
3742    outfile.createDimension('number_of_timesteps', number_of_times)
3743
3744    # variable definitions
3745    outfile.createVariable('x', precision, ('number_of_points',))
3746    outfile.createVariable('y', precision, ('number_of_points',))
3747    outfile.createVariable('elevation', precision, ('number_of_points',))
3748
3749    #FIXME: Backwards compatibility
3750    outfile.createVariable('z', precision, ('number_of_points',))
3751    #################################
3752
3753    outfile.createVariable('volumes', Int, ('number_of_volumes',
3754                                            'number_of_vertices'))
3755
3756    outfile.createVariable('time', precision,
3757                           ('number_of_timesteps',))
3758
3759    outfile.createVariable('stage', precision,
3760                           ('number_of_timesteps',
3761                            'number_of_points'))
3762
3763    outfile.createVariable('xmomentum', precision,
3764                           ('number_of_timesteps',
3765                            'number_of_points'))
3766
3767    outfile.createVariable('ymomentum', precision,
3768                           ('number_of_timesteps',
3769                            'number_of_points'))
3770
3771    #Store
3772    from anuga.coordinate_transforms.redfearn import redfearn
3773    x = zeros(number_of_points, Float)  #Easting
3774    y = zeros(number_of_points, Float)  #Northing
3775
3776    if verbose: print 'Making triangular grid'
3777    #Get zone of 1st point.
3778    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3779
3780    vertices = {}
3781    i = 0
3782    for k, lat in enumerate(latitudes):
3783        for l, lon in enumerate(longitudes):
3784
3785            vertices[l,k] = i
3786
3787            zone, easting, northing = redfearn(lat,lon)
3788
            msg = 'Zone boundary crossed at longitude = %s' %lon
3790            #assert zone == refzone, msg
3791            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3792            x[i] = easting
3793            y[i] = northing
3794            i += 1
3795
3796
3797    #Construct 2 triangles per 'rectangular' element
3798    volumes = []
3799    for l in range(number_of_longitudes-1):    #X direction
3800        for k in range(number_of_latitudes-1): #Y direction
3801            v1 = vertices[l,k+1]
3802            v2 = vertices[l,k]
3803            v3 = vertices[l+1,k+1]
3804            v4 = vertices[l+1,k]
3805
            #Note, this is different to the ferret2sww code
3807            #since the order of the lats is reversed.
3808            volumes.append([v1,v3,v2]) #Upper element
3809            volumes.append([v4,v2,v3]) #Lower element
3810
3811    volumes = array(volumes)
3812
3813    geo_ref = Geo_reference(refzone,min(x),min(y))
3814    geo_ref.write_NetCDF(outfile)
3815
3816    # This will put the geo ref in the middle
    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(y)+min(y))/2.)
3818
3819
3820    if verbose:
3821        print '------------------------------------------------'
3822        print 'More Statistics:'
        print '  Extent (UTM x/y):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y),
                len(y))
3830        print 'geo_ref: ',geo_ref
3831
3832    z = resize(bath_grid,outfile.variables['z'][:].shape)
3833    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3834    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3835    outfile.variables['z'][:] = z
3836    outfile.variables['elevation'][:] = z  #FIXME HACK
3837    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3838
3839    # do this to create an ok sww file.
3840    #outfile.variables['time'][:] = [0]   #Store time relative
3841    #outfile.variables['stage'] = z
3842    # put the next line up in the code after outfile.order = 1
3843    #number_of_times = 1
3844
3845    stage = outfile.variables['stage']
3846    xmomentum = outfile.variables['xmomentum']
3847    ymomentum = outfile.variables['ymomentum']
3848
3849    outfile.variables['time'][:] = times   #Store time relative
3850
3851    if verbose: print 'Converting quantities'
3852    n = number_of_times
3853    for j in range(number_of_times):
3854        # load in files
3855        elevation_meta, elevation_grid = \
3856           _read_asc(elevation_dir + os.sep + elevation_files[j])
3857
3858        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3859        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3860
3861        #print "elevation_grid",elevation_grid
3862        #cut matrix to desired size
3863        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3864        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3865        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3866        #print "elevation_grid",elevation_grid
3867        # handle missing values
3868        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue(missing):
3870            if fail_on_NaN:
3871                msg = 'File %s contains missing values'\
3872                      %(elevation_files[j])
3873                raise DataMissingValuesError, msg
3874            else:
3875                elevation_grid = elevation_grid*(missing==0) + \
3876                                 missing*elevation_NaN_filler
3877
3878
3879        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3880        i = 0
3881        for k in range(number_of_latitudes):      #Y direction
3882            for l in range(number_of_longitudes): #X direction
3883                w = zscale*elevation_grid[k,l] + mean_stage
3884                stage[j,i] = w
3885                h = w - z[i]
3886                xmomentum[j,i] = u_momentum_grid[k,l]*h
3887                ymomentum[j,i] = v_momentum_grid[k,l]*h
3888                i += 1
3889    outfile.close()
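
# Illustrative usage (directory names and extents are hypothetical).  Each
# directory holds esri ascii grids whose names start with the given
# prefixes ('ba', 'el', ...) and share a common date/time suffix:
#
#     >>> asc_csiro2sww('bathys', 'elevations', 'ucur', 'vcur',
#     ...               'alice.sww', minlat=-37.6, maxlat=-37.1,
#     ...               minlon=148.3, maxlon=148.9, verbose=True)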
3890
3891def _get_min_max_indexes(latitudes,longitudes,
3892                        minlat=None, maxlat=None,
3893                        minlon=None, maxlon=None):
3894    """
    Return min/max indexes (for slicing) of the lat and long arrays,
    covering the area specified with min/max lat/long.

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    assume latitudes & longitudes are sorted,
3904    long - from low to high (west to east, eg 148 - 151)
3905    lat - from high to low (north to south, eg -35 - -38)
3906    """
3907
3908    # reverse order of lat, so it's in ascending order
    try:
        latitudes = latitudes.tolist()
    except AttributeError:
        pass #Already a list
3913   
3914    latitudes.reverse()
3915   
3916    largest_lat_index = len(latitudes)-1
3917    #Cut out a smaller extent.
3918    if minlat == None:
3919        lat_min_index = 0
3920    else:
3921        lat_min_index = searchsorted(latitudes, minlat)-1
3922        if lat_min_index <0:
3923            lat_min_index = 0
3924
3925
3926    if maxlat == None:
3927        lat_max_index = largest_lat_index #len(latitudes)
3928    else:
3929        lat_max_index = searchsorted(latitudes, maxlat)
3930        if lat_max_index > largest_lat_index:
3931            lat_max_index = largest_lat_index
3932
3933    if minlon == None:
3934        lon_min_index = 0
3935    else:
3936        lon_min_index = searchsorted(longitudes, minlon)-1
3937        if lon_min_index <0:
3938            lon_min_index = 0
3939
3940    if maxlon == None:
3941        lon_max_index = len(longitudes)
3942    else:
3943        lon_max_index = searchsorted(longitudes, maxlon)
3944
3945    #Take into account that the latitude list was reversed
3946    latitudes.reverse() # Python passes by reference, need to swap back
3947    lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3948                                   largest_lat_index - lat_min_index
3949    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3950    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3951
3952    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
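
# Illustrative example: latitudes run north to south and longitudes west
# to east, as the docstring assumes.  The returned indices slice out the
# smallest window covering the requested extent:
#
#     >>> lats = [-35., -36., -37., -38.]
#     >>> lons = [148., 149., 150., 151.]
#     >>> _get_min_max_indexes(lats, lons, minlat=-36.5, maxlat=-35.5,
#     ...                      minlon=148.5, maxlon=150.5)
#     (0, 3, 0, 4)
#
# i.e. lats[0:3] and lons[0:4] cover the requested area.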
3953
3954
3955def _read_asc(filename, verbose=False):
3956    """Read esri file from the following ASCII format (.asc)
3957
3958    Example:
3959
3960    ncols         3121
3961    nrows         1800
3962    xllcorner     722000
3963    yllcorner     5893000
3964    cellsize      25
3965    NODATA_value  -9999
3966    138.3698 137.4194 136.5062 135.5558 ..........
3967
3968    """
3969
3970    datafile = open(filename)
3971
3972    if verbose: print 'Reading DEM from %s' %(filename)
3973    lines = datafile.readlines()
3974    datafile.close()
3975
3976    if verbose: print 'Got', len(lines), ' lines'
3977
3978    ncols = int(lines.pop(0).split()[1].strip())
3979    nrows = int(lines.pop(0).split()[1].strip())
3980    xllcorner = float(lines.pop(0).split()[1].strip())
3981    yllcorner = float(lines.pop(0).split()[1].strip())
3982    cellsize = float(lines.pop(0).split()[1].strip())
3983    NODATA_value = float(lines.pop(0).split()[1].strip())
3984
3985    assert len(lines) == nrows
3986
3987    #Store data
3988    grid = []
3989
3990    n = len(lines)
3991    for i, line in enumerate(lines):
3992        cells = line.split()
3993        assert len(cells) == ncols
3994        grid.append(array([float(x) for x in cells]))
3995    grid = array(grid)
3996
3997    return {'xllcorner':xllcorner,
3998            'yllcorner':yllcorner,
3999            'cellsize':cellsize,
4000            'NODATA_value':NODATA_value}, grid
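
# Illustrative usage (hypothetical filename):
#
#     >>> metadata, grid = _read_asc('bathymetry.asc')
#
# metadata holds 'xllcorner', 'yllcorner', 'cellsize' and 'NODATA_value';
# grid is an nrows x ncols Numeric array of cell values.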
4001
4002
4003
4004    ####  URS 2 SWW  ###
4005
4006lon_name = 'LON'
4007lat_name = 'LAT'
4008time_name = 'TIME'
precision = Float # So if we want to change the precision it's done here
4010class Write_nc:
4011    """
4012    Write an nc file.
4013   
4014    Note, this should be checked to meet cdc netcdf conventions for gridded
4015    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4016   
4017    """
4018    def __init__(self,
4019                 quantity_name,
4020                 file_name,
4021                 time_step_count,
4022                 time_step,
4023                 lon,
4024                 lat):
4025        """
4026        time_step_count is the number of time steps.
4027        time_step is the time step size
4028       
        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
4030        """
4031        self.quantity_name = quantity_name
4032        quantity_units= {'HA':'METERS',
4033                              'UA':'METERS/SECOND',
4034                              'VA':'METERS/SECOND'}
4035       
4036        #self.file_name = file_name
4037        self.time_step_count = time_step_count
4038        self.time_step = time_step
4039
4040        # NetCDF file definition
4041        self.outfile = NetCDFFile(file_name, 'w')
4042        outfile = self.outfile       
4043
4044        #Create new file
4045        nc_lon_lat_header(outfile, lon, lat)
4046   
4047        # TIME
4048        outfile.createDimension(time_name, None)
4049        outfile.createVariable(time_name, precision, (time_name,))
4050
4051        #QUANTITY
4052        outfile.createVariable(self.quantity_name, precision,
4053                               (time_name, lat_name, lon_name))
4054        outfile.variables[self.quantity_name].missing_value=-1.e+034
4055        outfile.variables[self.quantity_name].units= \
4056                                 quantity_units[self.quantity_name]
4057        outfile.variables[lon_name][:]= ensure_numeric(lon)
4058        outfile.variables[lat_name][:]= ensure_numeric(lat)
4059
4060        #Assume no one will be wanting to read this, while we are writing
4061        #outfile.close()
4062       
4063    def store_timestep(self, quantity_slice):
4064        """
4065        quantity_slice is the data to be stored at this time step
4066        """
4067       
4068        outfile = self.outfile
4069       
4070        # Get the variables
4071        time = outfile.variables[time_name]
4072        quantity = outfile.variables[self.quantity_name]
4073           
4074        i = len(time)
4075
4076        #Store time
4077        time[i] = i*self.time_step #self.domain.time
4078        quantity[i,:] = quantity_slice*100 # To convert from m to cm
4079                                           # And m/s to cm/sec
4080       
4081    def close(self):
4082        self.outfile.close()
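
# Illustrative usage of Write_nc (values are hypothetical): write two time
# slices of wave height for a 2 (lat) x 3 (lon) grid.  Note store_timestep
# multiplies its input by 100 (m to cm and m/s to cm/s):
#
#     >>> from Numeric import zeros, Float
#     >>> nc = Write_nc('HA', 'ha_example.nc', time_step_count=2,
#     ...               time_step=0.5, lon=[148., 149., 150.],
#     ...               lat=[-35., -36.])
#     >>> nc.store_timestep(zeros((2, 3), Float))
#     >>> nc.store_timestep(zeros((2, 3), Float))
#     >>> nc.close()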
4083
4084def urs2sww(basename_in='o', basename_out=None, verbose=False,
4085            remove_nc_files=True,
4086            minlat=None, maxlat=None,
4087            minlon= None, maxlon=None,
4088            mint=None, maxt=None,
4089            mean_stage=0,
4090            origin = None,
4091            zscale=1,
4092            fail_on_NaN=True,
4093            NaN_filler=0,
4094            elevation=None):
4095    """
4096    Convert URS C binary format for wave propagation to
4097    sww format native to abstract_2d_finite_volumes.
4098
4099    Specify only basename_in and read files of the form
4100    basefilename-z-mux, basefilename-e-mux and basefilename-n-mux containing
4101    relative height, x-velocity and y-velocity, respectively.
4102
4103    Also convert latitude and longitude to UTM. All coordinates are
4104    assumed to be given in the GDA94 datum. The latitude and longitude
    information is for a grid.
4106
    min's and max's: If omitted - full extent is used.
4108    To include a value min may equal it, while max must exceed it.
4109    Lat and lon are assumed to be in decimal degrees.
    NOTE: minlon is the eastern-most boundary.
4111   
4112    origin is a 3-tuple with geo referenced
4113    UTM coordinates (zone, easting, northing)
4114    It will be the origin of the sww file. This shouldn't be used,
    since all of anuga should be able to handle an arbitrary origin.
4116
4117
    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
4119    which means that latitude is the fastest
4120    varying dimension (row major order, so to speak)
4121
    In URS C binary the latitudes and longitudes are in ascending order.
4123    """
4124   
4125    if basename_out == None:
4126        basename_out = basename_in
4127    files_out = urs2nc(basename_in, basename_out)
4128    ferret2sww(basename_out,
4129               minlat=minlat,
4130               maxlat=maxlat,
4131               minlon=minlon,
4132               maxlon=maxlon,
4133               mint=mint,
4134               maxt=maxt,
4135               mean_stage=mean_stage,
4136               origin=origin,
4137               zscale=zscale,
4138               fail_on_NaN=fail_on_NaN,
4139               NaN_filler=NaN_filler,
4140               inverted_bathymetry=True,
4141               verbose=verbose)
4142    #print "files_out",files_out
4143    if remove_nc_files:
4144        for file_out in files_out:
4145            os.remove(file_out)
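
# Illustrative usage (hypothetical basename): reads project-z-mux,
# project-e-mux and project-n-mux, converts them via intermediate nc files
# (removed afterwards) and writes an sww file with the same basename:
#
#     >>> urs2sww('project', mean_stage=0.5, verbose=True)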
4146   
4147def urs2nc(basename_in = 'o', basename_out = 'urs'):
4148    """
4149    Convert the 3 urs files to 4 nc files.
4150
    The urs file names must be:
4152    [basename_in]-z-mux
4153    [basename_in]-e-mux
4154    [basename_in]-n-mux
4155   
4156    """
4157    files_in = [basename_in+'-z-mux',
4158                basename_in+'-e-mux',
4159                basename_in+'-n-mux']
4160    files_out = [basename_out+'_ha.nc',
4161                 basename_out+'_ua.nc',
4162                 basename_out+'_va.nc']
4163    quantities = ['HA','UA','VA']
4164
4165    for file_name in files_in:
4166        if os.access(file_name, os.F_OK) == 0 :
4167            msg = 'File %s does not exist or is not accessible' %file_name
4168            raise IOError, msg
4169       
4170    hashed_elevation = None
    for file_in, file_out, quantity in zip(files_in,
                                           files_out,
                                           quantities):
4174        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
4175                                         file_out,
4176                                         quantity)
4177        #print "lonlatdep", lonlatdep
4178        if hashed_elevation == None:
4179            elevation_file = basename_out+'_e.nc'
4180            write_elevation_nc(elevation_file,
4181                                lon,
4182                                lat,
4183                                depth)
4184            hashed_elevation = myhash(lonlatdep)
4185        else:
4186            msg = "The elevation information in the mux files is inconsistent"
4187            assert hashed_elevation == myhash(lonlatdep), msg
4188    files_out.append(elevation_file)
4189    return files_out
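
# Illustrative usage (hypothetical basenames): converts o-z-mux, o-e-mux
# and o-n-mux into urs_ha.nc, urs_ua.nc and urs_va.nc, plus the derived
# elevation file urs_e.nc; all four output names are returned:
#
#     >>> files = urs2nc(basename_in='o', basename_out='urs')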
4190   
4191def _binary_c2nc(file_in, file_out, quantity):
4192    """
4193    Reads in a quantity urs file and writes a quantity nc file.
4194    additionally, returns the depth and lat, long info,
4195    so it can be written to a file.
4196    """
4197    columns = 3 # long, lat , depth
4198    mux_file = open(file_in, 'rb')
4199   
4200    # Number of points/stations
4201    (points_num,)= unpack('i',mux_file.read(4))
4202
4203    # nt, int - Number of time steps
4204    (time_step_count,)= unpack('i',mux_file.read(4))
4205
4206    #dt, float - time step, seconds
4207    (time_step,) = unpack('f', mux_file.read(4))
4208   
4209    msg = "Bad data in the mux file."
4210    if points_num < 0:
4211        mux_file.close()
4212        raise ANUGAError, msg
4213    if time_step_count < 0:
4214        mux_file.close()
4215        raise ANUGAError, msg
4216    if time_step < 0:
4217        mux_file.close()
4218        raise ANUGAError, msg
4219   
4220    lonlatdep = p_array.array('f')
4221    lonlatdep.read(mux_file, columns * points_num)
4222    lonlatdep = array(lonlatdep, typecode=Float)   
4223    lonlatdep = reshape(lonlatdep, (points_num, columns))
4224   
4225    lon, lat, depth = lon_lat2grid(lonlatdep)
4226    lon_sorted = list(lon)
4227    lon_sorted.sort()
4228
    if list(lon) != lon_sorted:
        msg = "Longitudes in mux file are not in ascending order"
        raise IOError, msg
    lat_sorted = list(lat)
    lat_sorted.sort()

    if list(lat) != lat_sorted:
        msg = "Latitudes in mux file are not in ascending order"
        raise IOError, msg
4237   
4238    nc_file = Write_nc(quantity,
4239                       file_out,
4240                       time_step_count,
4241                       time_step,
4242                       lon,
4243                       lat)
4244
4245    for i in range(time_step_count):
4246        #Read in a time slice  from mux file 
4247        hz_p_array = p_array.array('f')
4248        hz_p_array.read(mux_file, points_num)
4249        hz_p = array(hz_p_array, typecode=Float)
4250        hz_p = reshape(hz_p, (len(lon), len(lat)))
4251        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
4252
4253        #write time slice to nc file
4254        nc_file.store_timestep(hz_p)
4255    mux_file.close()
4256    nc_file.close()
4257
4258    return lonlatdep, lon, lat, depth
4259   
4260
4261def write_elevation_nc(file_out, lon, lat, depth_vector):
4262    """
4263    Write an nc elevation file.
4264    """
4265   
4266    # NetCDF file definition
4267    outfile = NetCDFFile(file_out, 'w')
4268
4269    #Create new file
4270    nc_lon_lat_header(outfile, lon, lat)
4271   
4272    # ELEVATION
4273    zname = 'ELEVATION'
4274    outfile.createVariable(zname, precision, (lat_name, lon_name))
4275    outfile.variables[zname].units='CENTIMETERS'
4276    outfile.variables[zname].missing_value=-1.e+034
4277
4278    outfile.variables[lon_name][:]= ensure_numeric(lon)
4279    outfile.variables[lat_name][:]= ensure_numeric(lat)
4280
4281    depth = reshape(depth_vector, ( len(lat), len(lon)))
4282    outfile.variables[zname][:]= depth
4283   
4284    outfile.close()
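
# Illustrative usage (values are hypothetical): depth_vector is ordered
# with longitude varying fastest, matching the (lat, lon) reshape above:
#
#     >>> write_elevation_nc('e_example.nc', [148., 149., 150.],
#     ...                    [-35., -36.], [10., 11., 12., 20., 21., 22.])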
4285   
4286def nc_lon_lat_header(outfile, lon, lat):
4287    """
4288    outfile is the netcdf file handle.
4289    lon - a list/array of the longitudes
4290    lat - a list/array of the latitudes
4291    """
4292   
4293    outfile.institution = 'Geoscience Australia'
4294    outfile.description = 'Converted from URS binary C'
4295   
4296    # Longitude
4297    outfile.createDimension(lon_name, len(lon))
4298    outfile.createVariable(lon_name, precision, (lon_name,))
4299    outfile.variables[lon_name].point_spacing='uneven'
4300    outfile.variables[lon_name].units='degrees_east'
4301    outfile.variables[lon_name].assignValue(lon)
4302
4303
4304    # Latitude
4305    outfile.createDimension(lat_name, len(lat))
4306    outfile.createVariable(lat_name, precision, (lat_name,))
4307    outfile.variables[lat_name].point_spacing='uneven'
4308    outfile.variables[lat_name].units='degrees_north'
4309    outfile.variables[lat_name].assignValue(lat)
4310
4311
4312   
4313def lon_lat2grid(long_lat_dep):
4314    """
    given a list of points that are assumed to lie on a grid,
    return the longs and lats of the grid.
4317    long_lat_dep is an array where each row is a position.
4318    The first column is longitudes.
4319    The second column is latitudes.
4320
4321    The latitude is the fastest varying dimension - in mux files
4322    """
4323    LONG = 0
4324    LAT = 1
4325    QUANTITY = 2
4326
4327    long_lat_dep = ensure_numeric(long_lat_dep, Float)
4328   
4329    num_points = long_lat_dep.shape[0]
4330    this_rows_long = long_lat_dep[0,LONG]
4331
4332    # Count the length of unique latitudes
4333    i = 0
    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
        i += 1
    # determine the lats and longs from the grid
4337    lat = long_lat_dep[:i, LAT]       
4338    long = long_lat_dep[::i, LONG]
4339   
4340    lenlong = len(long)
4341    lenlat = len(lat)
4342    #print 'len lat', lat, len(lat)
4343    #print 'len long', long, len(long)
4344         
4345    msg = 'Input data is not gridded'     
4346    assert num_points % lenlat == 0, msg
4347    assert num_points % lenlong == 0, msg
4348         
4349    # Test that data is gridded       
4350    for i in range(lenlong):
4351        msg = 'Data is not gridded.  It must be for this operation'
4352        first = i*lenlat
4353        last = first + lenlat
4354               
4355        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4356        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
4357   
4358   
4359#    print 'range long', min(long), max(long)
4360#    print 'range lat', min(lat), max(lat)
4361#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4362#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4363   
4364   
4365   
4366    msg = 'Out of range latitudes/longitudes'
4367    for l in lat:assert -90 < l < 90 , msg
4368    for l in long:assert -180 < l < 180 , msg
4369
4370    # Changing quantity from lat being the fastest varying dimension to
4371    # long being the fastest varying dimension
4372    # FIXME - make this faster/do this a better way
4373    # use numeric transpose, after reshaping the quantity vector
4374#    quantity = zeros(len(long_lat_dep), Float)
4375    quantity = zeros(num_points, Float)
4376   
4377#    print 'num',num_points
4378    for lat_i, _ in enumerate(lat):
4379        for long_i, _ in enumerate(long):
4380            q_index = lat_i*lenlong+long_i
4381            lld_index = long_i*lenlat+lat_i
4382#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4383            temp = long_lat_dep[lld_index, QUANTITY]
4384            quantity[q_index] = temp
4385           
4386    return long, lat, quantity
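
# Illustrative example: six gridded points with latitude varying fastest
# (two latitudes per longitude) yield three longitudes, two latitudes and
# a quantity vector reordered so longitude varies fastest:
#
#     >>> lld = [[148., -35., 1.], [148., -36., 2.],
#     ...        [149., -35., 3.], [149., -36., 4.],
#     ...        [150., -35., 5.], [150., -36., 6.]]
#     >>> lon, lat, q = lon_lat2grid(lld)
#
# lon is [148., 149., 150.], lat is [-35., -36.] and q is
# [1., 3., 5., 2., 4., 6.].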
4387
4388    ####  END URS 2 SWW  ###     
4389#-------------------------------------------------------------
4390if __name__ == "__main__":
4391    pass
4392