source: anuga_core/source/anuga/shallow_water/data_manager.py @ 3975

Last change on this file since 3975 was 3975, checked in by duncan, 17 years ago

extend tests of urs2sww.

1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using abstract_2d_finite_volumes
48
49"""

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import csv
import os
from struct import unpack
import array as p_array

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around, reshape, transpose
from Scientific.IO.NetCDF import NetCDFFile


from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.utilities.numerical_tools import ensure_numeric
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError

def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s

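
# A hedged usage sketch (not from the original source): make_filename only
# normalises the characters handled above, so the sample input below is
# purely illustrative.
#
#   >>> make_filename(' My Run (final) ')
#   'My_Run_final'
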

def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    # Access flags must be OR'ed together ('and' would test only one flag)
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' % path

    return(path)



def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" % X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from anuga.config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' % size

    if time is not None:
        FN += '_time%.2f' % time

    FN += '.' + format
    return FN


def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from anuga.config import data_dir

    dir = check_dir(datadir)  # Includes trailing separator

    # Match filenames as generated by create_filename
    pattern = dir + filename + '_size%d*.%s' % (size, format)
    return glob.glob(pattern)

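
# Hedged example (names are illustrative, not from the original source;
# assumes a unix separator and that the 'output' directory exists):
#
#   >>> create_filename('output', 'run', 'sww', size=4, time=1.5)
#   'output/run_size4_time1.50.sww'
#
# get_files('output', 'run', 'sww', 4) would then glob for
# 'output/run_size4*.sww' and return all matching paths.
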


#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], \
               "Mode %s must be either 'w' (write), 'r' (read) or 'a' (append)" % mode

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.domain = domain



        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)




        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #Maximum file size is 2 GB
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                #fid.createDimension('number_of_points', len(domain.vertexlist))
                fid.createDimension('number_of_points', self.domain.number_of_full_nodes)

                # FIXME(Ole): This will cause sww files for parallel domains to
                # have ghost nodes stored (but not used by triangles).
                # To clean this up, we have to change get_vertex_values and friends in
                # quantity.py (but I can't be bothered right now)
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)

        volumes[:] = V.astype(volumes.typecode())
        x[:] = X
        y[:] = Y
        z[:] = Z

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z
        ################################



        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      % self.filename
                msg += ' - trying step %s again' % self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time) + 1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #I change the domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s' % self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename + filename_ext)


            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure =\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s' % file_size
                print '    saving file to %s' % next_data_structure.filename
            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)


                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]


                    A = choose(A-z[:] >= self.minimum_storable_height,
                               (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()

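
# A hedged sketch (not part of the original module) of reading back the
# variables defined above; 'run.sww' is an illustrative filename.
#
#   from Scientific.IO.NetCDF import NetCDFFile
#   fid = NetCDFFile('run.sww', 'r')
#   x = fid.variables['x'][:]            # vertex eastings
#   y = fid.variables['y'][:]            # vertex northings
#   stage = fid.variables['stage'][:]    # shape (number_of_timesteps, number_of_points)
#   # Water depth at the last stored timestep:
#   depth = stage[-1,:] - fid.variables['elevation'][:]
#   fid.close()
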

#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity (volumes)
        of the triangular mesh.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        # Note: Z is not stored here; quantities (including elevation)
        # are written per timestep by store_timestep

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  % self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        #Store the named quantity
        fid.variables[name][i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()





#Function for storing xya output
#FIXME Not done yet for this version
#This is obsolete.  Use geo_spatial_data instead
class Data_format_xya(Data_format):
    """Interface to the obsolete ASCII xya format
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store_all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes are assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' % (x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momenta) to file
        """
        pass


#### NEED national exposure database

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self, file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.

        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad; the
        info to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.
        """
        self._file_name = file_name
        self._geospatial = None

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
            self._load_exposure_csv(self._file_name)
        try:
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if is_x_y_locations is not False:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                 longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #FIXME throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result != 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result != 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic, \
                    other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result != 0:
                    return result
            return 0
        else:
            return 1

    def _load_exposure_csv(self, file_name):
        """
        Load in the csv as a dic, title as key and column info as value.
        Also, create a dic, title as key and column index as value.
        """
        attribute_dic = {}
        title_index_dic = {}
        titles_stripped = [] # list of titles

        reader = csv.reader(file(file_name))

        # Read in and manipulate the title info
        titles = reader.next()
        for i, title in enumerate(titles):
            titles_stripped.append(title.strip())
            title_index_dic[title.strip()] = i
        title_count = len(titles_stripped)
        #print "title_index_dic",title_index_dic


        #create a dic of column values, indexed by column title
        for line in reader:
            if len(line) != title_count:
                raise IOError #FIXME make this nicer
            for i, value in enumerate(line):
                attribute_dic.setdefault(titles_stripped[i],[]).append(value)

        return attribute_dic, title_index_dic

    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' % column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column, using the known_values,
        to return a value of the value_column_name.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) != value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' % column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name, 'wb')
        writer = csv.writer(fd)

        #Write the title to a csv file
        line = [None]*len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]] = title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]*len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]] = \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

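
# Hedged usage sketch (the filename and the 'DEPTH' column title are
# illustrative, not from the original source):
#
#   exposure = Exposure_csv('exposure.csv')
#   depths = [float(x) for x in exposure.get_column('DEPTH')]  # strings -> floats
#   exposure.set_column('DEPTH_M', [d/1000.0 for d in depths])
#   exposure.save('exposure_with_depth_m.csv')
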

#Auxiliary
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N == 3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))

    outfile.close()

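
# Hedged example (the single flat triangle is illustrative): building the
# (M,3) arrays write_obj expects with Numeric and writing one element.
#
#   from Numeric import array, Float
#   x = array([[0.0, 1.0, 0.0]], Float)   # x coords of the 3 vertices
#   y = array([[0.0, 0.0, 1.0]], Float)   # y coords
#   z = array([[0.0, 0.0, 0.0]], Float)   # elevations
#   write_obj('triangle', x, y, z)        # creates triangle.obj
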


#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN, xx, yy, zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' % t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN, xx, yy, zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float, tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry', x, y, z)


    #Now read all the data files with variable information, combine with
    #x,y info
    #and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        # Filenames from glob already include data_dir
        lines = open(filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Hmmmm'



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float, tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' % t, x, y, z)


def filter_netcdf(filename1, filename2, first=0, last=None, step=1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' % (j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

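
# Hedged usage sketch (filenames are illustrative): keep every second
# timestep of a large sww file to roughly halve its size.
#
#   filter_netcdf('run_full.sww', 'run_thin.sww', first=0, step=2)
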


#Get data objects
def get_dataobject(domain, mode='w'):
    """Return an instance of the data format class matching domain.format

    E.g. domain.format == 'sww' resolves to Data_format_sww.
    """

    cls = eval('Data_format_%s' % domain.format)
    return cls(domain, mode)

#FIXME move into geospatial.  There should only be one method that
# reads xya, and writes pts.
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get NetCDF
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' % (root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' % (i, line)

        try:
            x = float(fields[0])
            y = float(fields[1])
            z = float(fields[2])
        except:
            continue

        points.append([x, y])

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' % ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)

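
# Hedged usage sketch (the basename and offset are illustrative): convert
# an xya file to pts, keeping every 10th point and shifting the vertical
# datum via z_func.
#
#   xya2pts('bathymetry', stride=10,
#           attribute_name='elevation',
#           z_func=lambda z: z - 0.32)   # e.g. a datum correction
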


######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name=None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from anuga.coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()

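
# Hedged usage sketch (coordinates are illustrative): writing three points
# with an elevation attribute, georeferenced to UTM zone 56.
#
#   points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]]
#   elev = [10.0, 11.0, 12.0]
#   write_ptsfile('three_points.pts', points, elev,
#                 attribute_name='elevation',
#                 zone=56, xllcorner=308500.0, yllcorner=6189000.0)
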


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False,):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

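
# Hedged usage sketch (basename and bounds are illustrative): convert
# topo.dem to topo.pts, clipped to a bounding box, with the result cached
# so a rerun against an unchanged topo.dem is essentially free.
#
#   dem2pts('topo',
#           easting_min=722000, easting_max=725000,
#           northing_min=5893000, northing_max=5895000,
#           use_cache=True, verbose=True)
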


def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' % (root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' % ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints - nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1, j1_0:thisj+1]

    if verbose:
        print 'There are %d values in the elevation' % totalnopoints
        print 'There are %d values in the clipped elevation' % clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' % nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0, thisi+1, 1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' % (i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i, index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0, index2+1, 1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min, y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()




def _read_hecras_cross_sections(lines):
    """Yield a block of surface points for each cross section.
    A block starts after SURFACE LINE and
    ends with END CROSS-SECTION.
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])

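
# Hedged usage sketch: the generator above yields one list of
# [easting, northing, elevation] triplets per cross section, so a caller
# can flatten an sdf file (here 'river.sdf', an illustrative name) with:
#
#   lines = open('river.sdf', 'r').readlines()
#   for section in _read_hecras_cross_sections(lines):
#       for easting, northing, elevation in section:
#           pass  # e.g. accumulate points and elevations
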
1604
1605
1606def hecras_cross_sections2pts(basename_in,
1607                              basename_out=None,
1608                              verbose=False):
1609    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1610
1611    Example:
1612
1613
1614# RAS export file created on Mon 15Aug2005 11:42
1615# by HEC-RAS Version 3.1.1
1616
1617BEGIN HEADER:
1618  UNITS: METRIC
1619  DTM TYPE: TIN
1620  DTM: v:\1\cit\perth_topo\river_tin
1621  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1622  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1623  MAP PROJECTION: UTM
1624  PROJECTION ZONE: 50
1625  DATUM: AGD66
1626  VERTICAL DATUM:
1627  NUMBER OF REACHES:  19
1628  NUMBER OF CROSS-SECTIONS:  14206
1629END HEADER:
1630
1631
1632Only the SURFACE LINE data of the following form will be utilised
1633
1634  CROSS-SECTION:
1635    STREAM ID:Southern-Wungong
1636    REACH ID:Southern-Wungong
1637    STATION:19040.*
1638    CUT LINE:
1639      405548.671603161 , 6438142.7594925
1640      405734.536092045 , 6438326.10404912
1641      405745.130459356 , 6438331.48627354
1642      405813.89633823 , 6438368.6272789
1643    SURFACE LINE:
1644     405548.67,   6438142.76,   35.37
1645     405552.24,   6438146.28,   35.41
1646     405554.78,   6438148.78,   35.44
1647     405555.80,   6438149.79,   35.44
1648     405559.37,   6438153.31,   35.45
1649     405560.88,   6438154.81,   35.44
1650     405562.93,   6438156.83,   35.42
1651     405566.50,   6438160.35,   35.38
1652     405566.99,   6438160.83,   35.37
1653     ...
1654   END CROSS-SECTION
1655
1656    Convert to NetCDF pts format which is
1657
1658    points:  (Nx2) Float array
1659    elevation: N Float array
1660    """
1661
1662    #FIXME: Can this be written feasibly using write_pts?
1663
1664    import os
1665    from Scientific.IO.NetCDF import NetCDFFile
1666    from Numeric import Float, zeros, reshape
1667
1668    root = basename_in
1669
1670    #Get ASCII file
1671    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1672
1673    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1674
1675    lines = infile.readlines()
1676    infile.close()
1677
1678    if verbose: print 'Converting to pts format'
1679
1680    i = 0
1681    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1682        i += 1
1683
1684    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1685    i += 1
1686
1687    assert lines[i].strip().upper().startswith('UNITS:')
1688    units = lines[i].strip().split()[1]
1689    i += 1
1690
1691    assert lines[i].strip().upper().startswith('DTM TYPE:')
1692    i += 1
1693
1694    assert lines[i].strip().upper().startswith('DTM:')
1695    i += 1
1696
1697    assert lines[i].strip().upper().startswith('STREAM')
1698    i += 1
1699
1700    assert lines[i].strip().upper().startswith('CROSS')
1701    i += 1
1702
1703    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1704    projection = lines[i].strip().split(':')[1]
1705    i += 1
1706
1707    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1708    zone = int(lines[i].strip().split(':')[1])
1709    i += 1
1710
1711    assert lines[i].strip().upper().startswith('DATUM:')
1712    datum = lines[i].strip().split(':')[1]
1713    i += 1
1714
1715    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1716    i += 1
1717
1718    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1719    i += 1
1720
1721    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1722    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1723    i += 1
1724
1725
1726    #Now read all points
1727    points = []
1728    elevation = []
1729    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1730        for k, entry in enumerate(entries):
1731            points.append(entry[:2])
1732            elevation.append(entry[2])
1733
1734
1735    msg = 'Actual number of cross sections == %d, reported as %d'\
1736          %(j+1, number_of_cross_sections)
1737    assert j+1 == number_of_cross_sections, msg
1738
1739    #Get output file
1740    if basename_out == None:
1741        ptsname = root + '.pts'
1742    else:
1743        ptsname = basename_out + '.pts'
1744
1745    #FIXME (DSG-ON): use loadASCII export_points_file
1746    if verbose: print 'Store to NetCDF file %s' %ptsname
1747    # NetCDF file definition
1748    outfile = NetCDFFile(ptsname, 'w')
1749
1750    #Create new file
1751    outfile.institution = 'Geoscience Australia'
1752    outfile.description = 'NetCDF pts format for compact and portable ' +\
1753                          'storage of spatial point data derived from HEC-RAS'
1754
1755    #Georeferencing
1756    outfile.zone = zone
1757    outfile.xllcorner = 0.0
1758    outfile.yllcorner = 0.0
1759    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1760    outfile.false_northing = 1000000
1761
1762    outfile.projection = projection
1763    outfile.datum = datum
1764    outfile.units = units
1765
1766
1767    outfile.createDimension('number_of_points', len(points))
1768    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1769
1770    # variable definitions
1771    outfile.createVariable('points', Float, ('number_of_points',
1772                                             'number_of_dimensions'))
1773    outfile.createVariable('elevation', Float, ('number_of_points',))
1774
1775    # Get handles to the variables
1776    outfile.variables['points'][:] = points
1777    outfile.variables['elevation'][:] = elevation
1778
1779    outfile.close()
1780
1781
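# Example usage (a sketch; 'river_xs' is a hypothetical basename, so
# river_xs.sdf must exist):
#
#     hecras_cross_sections2pts('river_xs',
#                               basename_out='river_xs_points',
#                               verbose=True)
#     # -> writes river_xs_points.pts holding 'points' and 'elevation'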
1782
1783def sww2dem(basename_in, basename_out = None,
1784            quantity = None,
1785            timestep = None,
1786            reduction = None,
1787            cellsize = 10,
1788            NODATA_value = -9999,
1789            easting_min = None,
1790            easting_max = None,
1791            northing_min = None,
1792            northing_max = None,
1793            verbose = False,
1794            origin = None,
1795            datum = 'WGS84',
1796            format = 'ers'):
1797
1798    """Read SWW file and convert to Digital Elevation Model format (.asc or .ers)
1799
1800    Example (ASC):
1801
1802    ncols         3121
1803    nrows         1800
1804    xllcorner     722000
1805    yllcorner     5893000
1806    cellsize      25
1807    NODATA_value  -9999
1808    138.3698 137.4194 136.5062 135.5558 ..........
1809
1810    For 'asc' output, also write an accompanying file with same basename_out
1811    but extension .prj used to fix the UTM zone, datum, false northings and eastings.
1812
1813    The prj format is assumed to be as
1814
1815    Projection    UTM
1816    Zone          56
1817    Datum         WGS84
1818    Zunits        NO
1819    Units         METERS
1820    Spheroid      WGS84
1821    Xshift        0.0000000000
1822    Yshift        10000000.0000000000
1823    Parameters
1824
1825
1826    The parameter quantity must be the name of an existing quantity or
1827    an expression involving existing quantities. The default is
1828    'elevation'.
1829
1830    If timestep (an index) is given, output quantity at that timestep.
1831
1832    If reduction is given, use that to reduce quantity over all timesteps.
1833
1834    datum is the geodetic datum written to the output (default 'WGS84').
1835
1836    format can be either 'asc' or 'ers'
1837    """
1838
1839    import sys
1840    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1841    from Numeric import array2string
1842
1843    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1844    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
1845
1846    msg = 'Format must be either asc or ers'
1847    assert format.lower() in ['asc', 'ers'], msg
1848
1849
1850    false_easting = 500000
1851    false_northing = 10000000
1852
1853    if quantity is None:
1854        quantity = 'elevation'
1855
1856    if reduction is None:
1857        reduction = max
1858
1859    if basename_out is None:
1860        basename_out = basename_in + '_%s' %quantity
1861
1862    swwfile = basename_in + '.sww'
1863    demfile = basename_out + '.' + format
1864    # Note that a .ers extension is optional (write_ermapper_grid will
1865    # deal with either option)
1866
1867    #Read sww file
1868    if verbose: print 'Reading from %s' %swwfile
1869    from Scientific.IO.NetCDF import NetCDFFile
1870    fid = NetCDFFile(swwfile)
1871
1872    #Get extent and reference
1873    x = fid.variables['x'][:]
1874    y = fid.variables['y'][:]
1875    volumes = fid.variables['volumes'][:]
1876    if timestep is not None:
1877        times = fid.variables['time'][timestep]
1878    else:
1879        times = fid.variables['time'][:]
1880
1881    number_of_timesteps = fid.dimensions['number_of_timesteps']
1882    number_of_points = fid.dimensions['number_of_points']
1883   
1884    if origin is None:
1885
1886        #Get geo_reference
1887        #sww files don't have to have a geo_ref
1888        try:
1889            geo_reference = Geo_reference(NetCDFObject=fid)
1890        except AttributeError, e:
1891            geo_reference = Geo_reference() #Default georef object
1892
1893        xllcorner = geo_reference.get_xllcorner()
1894        yllcorner = geo_reference.get_yllcorner()
1895        zone = geo_reference.get_zone()
1896    else:
1897        zone = origin[0]
1898        xllcorner = origin[1]
1899        yllcorner = origin[2]
1900
1901
1902
1903    #FIXME: Refactor using code from file_function.statistics
1904    #Something like print swwstats(swwname)
1905    if verbose:
1906        print '------------------------------------------------'
1907        print 'Statistics of SWW file:'
1908        print '  Name: %s' %swwfile
1909        print '  Reference:'
1910        print '    Lower left corner: [%f, %f]'\
1911              %(xllcorner, yllcorner)
1912        if timestep is not None:
1913            print '    Time: %f' %(times)
1914        else:
1915            print '    Start time: %f' %fid.starttime[0]
1916        print '  Extent:'
1917        print '    x [m] in [%f, %f], len(x) == %d'\
1918              %(min(x.flat), max(x.flat), len(x.flat))
1919        print '    y [m] in [%f, %f], len(y) == %d'\
1920              %(min(y.flat), max(y.flat), len(y.flat))
1921        if timestep is not None:
1922            print '    t [s] = %f, len(t) == %d' %(times, 1)
1923        else:
1924            print '    t [s] in [%f, %f], len(t) == %d'\
1925              %(min(times), max(times), len(times))
1926        print '  Quantities [SI units]:'
1927        for name in ['stage', 'xmomentum', 'ymomentum']:
1928            q = fid.variables[name][:].flat
1929            if timestep is not None:
1930                q = q[timestep*len(x):(timestep+1)*len(x)]
1931            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1932        for name in ['elevation']:
1933            q = fid.variables[name][:].flat
1934            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1935           
1936    # Get quantity and reduce if applicable
1937    if verbose: print 'Processing quantity %s' %quantity
1938
1939    # Turn NetCDF objects into Numeric arrays
1940    quantity_dict = {}
1941    for name in fid.variables.keys():
1942        quantity_dict[name] = fid.variables[name][:] 
1943
1944
1945    # Convert quantity expression to quantities found in sww file   
1946    q = apply_expression_to_dictionary(quantity, quantity_dict)
1947
1948    if len(q.shape) == 2:
1949        #q has a time component and needs to be reduced along
1950        #the temporal dimension
1951        if verbose: print 'Reducing quantity %s' %quantity
1952        q_reduced = zeros( number_of_points, Float )
1953       
1954        if timestep is not None:
1955            for k in range(number_of_points):
1956                q_reduced[k] = q[timestep,k] 
1957        else:
1958            for k in range(number_of_points):
1959                q_reduced[k] = reduction( q[:,k] )
1960
1961        q = q_reduced
1962
1963    #Post condition: Now q has dimension: number_of_points
1964    assert len(q.shape) == 1
1965    assert q.shape[0] == number_of_points
1966
1967
1968    if verbose:
1969        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1970
1971
1972    #Create grid and update xll/yll corner and x,y
1973
1974    #Relative extent
1975    if easting_min is None:
1976        xmin = min(x)
1977    else:
1978        xmin = easting_min - xllcorner
1979
1980    if easting_max is None:
1981        xmax = max(x)
1982    else:
1983        xmax = easting_max - xllcorner
1984
1985    if northing_min is None:
1986        ymin = min(y)
1987    else:
1988        ymin = northing_min - yllcorner
1989
1990    if northing_max is None:
1991        ymax = max(y)
1992    else:
1993        ymax = northing_max - yllcorner
1994
1995
1996
1997    if verbose: print 'Creating grid'
1998    ncols = int((xmax-xmin)/cellsize)+1
1999    nrows = int((ymax-ymin)/cellsize)+1
2000
2001
2002    #New absolute reference and coordinates
2003    newxllcorner = xmin+xllcorner
2004    newyllcorner = ymin+yllcorner
2005
2006    x = x+xllcorner-newxllcorner
2007    y = y+yllcorner-newyllcorner
2008   
2009    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2010    assert len(vertex_points.shape) == 2
2011
2012    grid_points = zeros ( (ncols*nrows, 2), Float )
2013
2014
2015    for i in xrange(nrows):
2016        if format.lower() == 'asc':
2017            yg = i*cellsize
2018        else:
2019            #This will flip the order of the y values for ers
2020            yg = (nrows-i)*cellsize
2021
2022        for j in xrange(ncols):
2023            xg = j*cellsize
2024            k = i*ncols + j
2025
2026            grid_points[k,0] = xg
2027            grid_points[k,1] = yg
2028
2029    #Interpolate
2030    #from least_squares import Interpolation
2031    from anuga.fit_interpolate.interpolate import Interpolate
2032
2033    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2034
2035    #Interpolate using quantity values
2036    if verbose: print 'Interpolating'
2037    grid_values = interp.interpolate(q, grid_points).flat
2038
2039
2040    if verbose:
2041        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2042                                                      max(grid_values))
2043
2044    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2045    P = interp.mesh.get_boundary_polygon()
2046    outside_indices = outside_polygon(grid_points, P, closed=True)
2047
2048    for i in outside_indices:
2049        grid_values[i] = NODATA_value
2050
2051
2052
2053
2054    if format.lower() == 'ers':
2055        # setup ERS header information
2056        grid_values = reshape(grid_values,(nrows, ncols))
2057        header = {}
2058        header['datum'] = '"' + datum + '"'
2059        # FIXME The use of hardwired UTM and zone number needs to be made optional
2060        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2061        header['projection'] = '"UTM-' + str(zone) + '"'
2062        header['coordinatetype'] = 'EN'
2063        if header['coordinatetype'] == 'LL':
2064            header['longitude'] = str(newxllcorner)
2065            header['latitude'] = str(newyllcorner)
2066        elif header['coordinatetype'] == 'EN':
2067            header['eastings'] = str(newxllcorner)
2068            header['northings'] = str(newyllcorner)
2069        header['nullcellvalue'] = str(NODATA_value)
2070        header['xdimension'] = str(cellsize)
2071        header['ydimension'] = str(cellsize)
2072        header['value'] = '"' + quantity + '"'
2073        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2074
2075
2076        #Write
2077        if verbose: print 'Writing %s' %demfile
2078        import ermapper_grids
2079        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2080
2081        fid.close()
2082    else:
2083        #Write to Ascii format
2084
2085        #Write prj file
2086        prjfile = basename_out + '.prj'
2087
2088        if verbose: print 'Writing %s' %prjfile
2089        prjid = open(prjfile, 'w')
2090        prjid.write('Projection    %s\n' %'UTM')
2091        prjid.write('Zone          %d\n' %zone)
2092        prjid.write('Datum         %s\n' %datum)
2093        prjid.write('Zunits        NO\n')
2094        prjid.write('Units         METERS\n')
2095        prjid.write('Spheroid      %s\n' %datum)
2096        prjid.write('Xshift        %d\n' %false_easting)
2097        prjid.write('Yshift        %d\n' %false_northing)
2098        prjid.write('Parameters\n')
2099        prjid.close()
2100
2101
2102
2103        if verbose: print 'Writing %s' %demfile
2104
2105        ascid = open(demfile, 'w')
2106
2107        ascid.write('ncols         %d\n' %ncols)
2108        ascid.write('nrows         %d\n' %nrows)
2109        ascid.write('xllcorner     %d\n' %newxllcorner)
2110        ascid.write('yllcorner     %d\n' %newyllcorner)
2111        ascid.write('cellsize      %f\n' %cellsize)
2112        ascid.write('NODATA_value  %d\n' %NODATA_value)
2113
2114
2115        #Get bounding polygon from mesh
2116        #P = interp.mesh.get_boundary_polygon()
2117        #inside_indices = inside_polygon(grid_points, P)
2118
2119        for i in range(nrows):
2120            if verbose and i%((nrows+10)/10)==0:
2121                print 'Doing row %d of %d' %(i, nrows)
2122
2123            base_index = (nrows-i-1)*ncols
2124
2125            row_values = grid_values[base_index:base_index+ncols]
2126            s = array2string(row_values, max_line_width=sys.maxint)
2127            ascid.write(s[1:-1] + '\n')
2128
2129
2130            #print
2131            #for j in range(ncols):
2132            #    index = base_index+j#
2133            #    print grid_values[index],
2134            #    ascid.write('%f ' %grid_values[index])
2135            #ascid.write('\n')
2136
2137        #Close
2138        ascid.close()
2139        fid.close()
2140
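# Example usage (a sketch; 'myrun' is a hypothetical basename, so myrun.sww
# must exist; per the docstring, quantity may be an expression over stored
# quantities):
#
#     sww2dem('myrun',
#             basename_out='myrun_maxdepth',
#             quantity='stage-elevation',
#             reduction=max,
#             cellsize=25,
#             format='asc')
#     # -> writes myrun_maxdepth.asc and myrun_maxdepth.prj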
2141#Backwards compatibility
2142def sww2asc(basename_in, basename_out = None,
2143            quantity = None,
2144            timestep = None,
2145            reduction = None,
2146            cellsize = 10,
2147            verbose = False,
2148            origin = None):
2149    print 'sww2asc will soon be obsoleted - please use sww2dem'
2150    sww2dem(basename_in,
2151            basename_out = basename_out,
2152            quantity = quantity,
2153            timestep = timestep,
2154            reduction = reduction,
2155            cellsize = cellsize,
2156            verbose = verbose,
2157            origin = origin,
2158            datum = 'WGS84',
2159            format = 'asc')
2160
2161def sww2ers(basename_in, basename_out = None,
2162            quantity = None,
2163            timestep = None,
2164            reduction = None,
2165            cellsize = 10,
2166            verbose = False,
2167            origin = None,
2168            datum = 'WGS84'):
2169    print 'sww2ers will soon be obsoleted - please use sww2dem'
2170    sww2dem(basename_in,
2171            basename_out = basename_out,
2172            quantity = quantity,
2173            timestep = timestep,
2174            reduction = reduction,
2175            cellsize = cellsize,
2176            verbose = verbose,
2177            origin = origin,
2178            datum = datum,
2179            format = 'ers')
2180################################# END COMPATIBILITY ##############
2181
2182
2183
2184def sww2pts(basename_in, basename_out=None,
2185            data_points=None,
2186            quantity=None,
2187            timestep=None,
2188            reduction=None,
2189            NODATA_value=-9999,
2190            verbose=False,
2191            origin=None):
2192            #datum = 'WGS84')
2193
2194
2195    """Read SWW file and convert to interpolated values at selected points
2196
2197    The parameter 'quantity' must be the name of an existing quantity or
2198    an expression involving existing quantities. The default is
2199    'elevation'.
2200
2201    If timestep (an index) is given, output quantity at that timestep.
2202
2203    If reduction is given, use that to reduce quantity over all timesteps.
2204
2205    data_points (Nx2 array) gives locations of points where quantity is to be computed.
2206   
2207    """
2208
2209    import sys
2210    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2211    from Numeric import array2string
2212
2213    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2214    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
2215
2216    from anuga.geospatial_data.geospatial_data import Geospatial_data
2217
2218    if quantity is None:
2219        quantity = 'elevation'
2220
2221    if reduction is None:
2222        reduction = max
2223
2224    if basename_out is None:
2225        basename_out = basename_in + '_%s' %quantity
2226
2227    swwfile = basename_in + '.sww'
2228    ptsfile = basename_out + '.pts'
2229
2230    # Read sww file
2231    if verbose: print 'Reading from %s' %swwfile
2232    from Scientific.IO.NetCDF import NetCDFFile
2233    fid = NetCDFFile(swwfile)
2234
2235    # Get extent and reference
2236    x = fid.variables['x'][:]
2237    y = fid.variables['y'][:]
2238    volumes = fid.variables['volumes'][:]
2239
2240    number_of_timesteps = fid.dimensions['number_of_timesteps']
2241    number_of_points = fid.dimensions['number_of_points']
2242    if origin is None:
2243
2244        # Get geo_reference
2245        # sww files don't have to have a geo_ref
2246        try:
2247            geo_reference = Geo_reference(NetCDFObject=fid)
2248        except AttributeError, e:
2249            geo_reference = Geo_reference() #Default georef object
2250
2251        xllcorner = geo_reference.get_xllcorner()
2252        yllcorner = geo_reference.get_yllcorner()
2253        zone = geo_reference.get_zone()
2254    else:
2255        zone = origin[0]
2256        xllcorner = origin[1]
2257        yllcorner = origin[2]
2258
2259
2260
2261    # FIXME: Refactor using code from file_function.statistics
2262    # Something like print swwstats(swwname)
2263    if verbose:
2264        x = fid.variables['x'][:]
2265        y = fid.variables['y'][:]
2266        times = fid.variables['time'][:]
2267        print '------------------------------------------------'
2268        print 'Statistics of SWW file:'
2269        print '  Name: %s' %swwfile
2270        print '  Reference:'
2271        print '    Lower left corner: [%f, %f]'\
2272              %(xllcorner, yllcorner)
2273        print '    Start time: %f' %fid.starttime[0]
2274        print '  Extent:'
2275        print '    x [m] in [%f, %f], len(x) == %d'\
2276              %(min(x.flat), max(x.flat), len(x.flat))
2277        print '    y [m] in [%f, %f], len(y) == %d'\
2278              %(min(y.flat), max(y.flat), len(y.flat))
2279        print '    t [s] in [%f, %f], len(t) == %d'\
2280              %(min(times), max(times), len(times))
2281        print '  Quantities [SI units]:'
2282        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2283            q = fid.variables[name][:].flat
2284            print '    %s in [%f, %f]' %(name, min(q), max(q))
2285
2286
2287
2288    # Get quantity and reduce if applicable
2289    if verbose: print 'Processing quantity %s' %quantity
2290
2291    # Turn NetCDF objects into Numeric arrays
2292    quantity_dict = {}
2293    for name in fid.variables.keys():
2294        quantity_dict[name] = fid.variables[name][:]
2295
2296
2297
2298    # Convert quantity expression to quantities found in sww file   
2299    q = apply_expression_to_dictionary(quantity, quantity_dict)
2300
2301
2302
2303    if len(q.shape) == 2:
2304        # q has a time component and needs to be reduced along
2305        # the temporal dimension
2306        if verbose: print 'Reducing quantity %s' %quantity
2307        q_reduced = zeros( number_of_points, Float )
2308
2309        for k in range(number_of_points):
2310            q_reduced[k] = reduction( q[:,k] )
2311
2312        q = q_reduced
2313
2314    # Post condition: Now q has dimension: number_of_points
2315    assert len(q.shape) == 1
2316    assert q.shape[0] == number_of_points
2317
2318
2319    if verbose:
2320        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2321
2322
2323    # Build array of vertex coordinates from x,y
2324    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2325    assert len(vertex_points.shape) == 2
2326
2327    # Interpolate
2328    from anuga.fit_interpolate.interpolate import Interpolate
2329    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2330
2331    # Interpolate using quantity values
2332    if verbose: print 'Interpolating'
2333    interpolated_values = interp.interpolate(q, data_points).flat
2334
2335
2336    if verbose:
2337        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2338                                                      max(interpolated_values))
2339
2340    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2341    P = interp.mesh.get_boundary_polygon()
2342    outside_indices = outside_polygon(data_points, P, closed=True)
2343
2344    for i in outside_indices:
2345        interpolated_values[i] = NODATA_value
2346
2347
2348    # Store results   
2349    G = Geospatial_data(data_points=data_points,
2350                        attributes=interpolated_values)
2351
2352    G.export_points_file(ptsfile, absolute = True)
2353
2354    fid.close()
2355
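# Example usage (a sketch; basename and gauge locations are hypothetical):
#
#     gauges = [[380000.0, 6430000.0],
#               [380500.0, 6430500.0]]
#     sww2pts('myrun',
#             basename_out='myrun_gauges',
#             data_points=gauges,
#             quantity='elevation')
#     # -> writes myrun_gauges.pts with interpolated values at the gauges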
2356
2357def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2358                                  use_cache = False,
2359                                  verbose = False):
2360    """Read Digital Elevation Model from the following ASCII format (.asc)
2361
2362    Example:
2363
2364    ncols         3121
2365    nrows         1800
2366    xllcorner     722000
2367    yllcorner     5893000
2368    cellsize      25
2369    NODATA_value  -9999
2370    138.3698 137.4194 136.5062 135.5558 ..........
2371
2372    Convert basename_in + '.asc' to NetCDF format (.dem)
2373    mimicking the ASCII format closely.
2374
2375
2376    An accompanying file with same basename_in but extension .prj must exist
2377    and is used to fix the UTM zone, datum, false northings and eastings.
2378
2379    The prj format is assumed to be as
2380
2381    Projection    UTM
2382    Zone          56
2383    Datum         WGS84
2384    Zunits        NO
2385    Units         METERS
2386    Spheroid      WGS84
2387    Xshift        0.0000000000
2388    Yshift        10000000.0000000000
2389    Parameters
2390    """
2391
2392
2393
2394    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2395
2396    if use_cache is True:
2397        from caching import cache
2398        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2399                       dependencies = [basename_in + '.asc'],
2400                       verbose = verbose)
2401
2402    else:
2403        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2404
2405    return result
2406
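# Example usage (a sketch; 'topo' is a hypothetical basename, so both
# topo.asc and topo.prj must exist):
#
#     convert_dem_from_ascii2netcdf('topo', use_cache=True, verbose=True)
#     # -> writes topo.dem mirroring the ASCII grid in NetCDF form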
2407
2408
2409
2410
2411
2412def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2413                                  verbose = False):
2414    """Read Digital Elevation Model from the following ASCII format (.asc)
2415
2416    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2417    """
2418
2419    import os
2420    from Scientific.IO.NetCDF import NetCDFFile
2421    from Numeric import Float, array
2422
2423    #root, ext = os.path.splitext(basename_in)
2424    root = basename_in
2425
2426    ###########################################
2427    # Read Meta data
2428    if verbose: print 'Reading METADATA from %s' %(root + '.prj')
2429    metadatafile = open(root + '.prj')
2430    metalines = metadatafile.readlines()
2431    metadatafile.close()
2432
2433    L = metalines[0].strip().split()
2434    assert L[0].strip().lower() == 'projection'
2435    projection = L[1].strip()                   #TEXT
2436
2437    L = metalines[1].strip().split()
2438    assert L[0].strip().lower() == 'zone'
2439    zone = int(L[1].strip())
2440
2441    L = metalines[2].strip().split()
2442    assert L[0].strip().lower() == 'datum'
2443    datum = L[1].strip()                        #TEXT
2444
2445    L = metalines[3].strip().split()
2446    assert L[0].strip().lower() == 'zunits'     #IGNORE
2447    zunits = L[1].strip()                       #TEXT
2448
2449    L = metalines[4].strip().split()
2450    assert L[0].strip().lower() == 'units'
2451    units = L[1].strip()                        #TEXT
2452
2453    L = metalines[5].strip().split()
2454    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2455    spheroid = L[1].strip()                     #TEXT
2456
2457    L = metalines[6].strip().split()
2458    assert L[0].strip().lower() == 'xshift'
2459    false_easting = float(L[1].strip())
2460
2461    L = metalines[7].strip().split()
2462    assert L[0].strip().lower() == 'yshift'
2463    false_northing = float(L[1].strip())
2464
2465    #print false_easting, false_northing, zone, datum
2466
2467
2468    ###########################################
2469    #Read DEM data
2470
2471    datafile = open(basename_in + '.asc')
2472
2473    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2474    lines = datafile.readlines()
2475    datafile.close()
2476
2477    if verbose: print 'Got', len(lines), 'lines'
2478
2479    ncols = int(lines[0].split()[1].strip())
2480    nrows = int(lines[1].split()[1].strip())
2481    xllcorner = float(lines[2].split()[1].strip())
2482    yllcorner = float(lines[3].split()[1].strip())
2483    cellsize = float(lines[4].split()[1].strip())
2484    NODATA_value = int(lines[5].split()[1].strip())
2485
2486    assert len(lines) == nrows + 6
2487
2488
2489    ##########################################
2490
2491
2492    if basename_out == None:
2493        netcdfname = root + '.dem'
2494    else:
2495        netcdfname = basename_out + '.dem'
2496
2497    if verbose: print 'Store to NetCDF file %s' %netcdfname
2498    # NetCDF file definition
2499    fid = NetCDFFile(netcdfname, 'w')
2500
2501    #Create new file
2502    fid.institution = 'Geoscience Australia'
2503    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2504                      'of spatial point data'
2505
2506    fid.ncols = ncols
2507    fid.nrows = nrows
2508    fid.xllcorner = xllcorner
2509    fid.yllcorner = yllcorner
2510    fid.cellsize = cellsize
2511    fid.NODATA_value = NODATA_value
2512
2513    fid.zone = zone
2514    fid.false_easting = false_easting
2515    fid.false_northing = false_northing
2516    fid.projection = projection
2517    fid.datum = datum
2518    fid.units = units
2519
2520
2521    # dimension definitions
2522    fid.createDimension('number_of_rows', nrows)
2523    fid.createDimension('number_of_columns', ncols)
2524
2525    # variable definitions
2526    fid.createVariable('elevation', Float, ('number_of_rows',
2527                                            'number_of_columns'))
2528
2529    # Get handles to the variables
2530    elevation = fid.variables['elevation']
2531
2532    #Store data
2533    n = len(lines[6:])
2534    for i, line in enumerate(lines[6:]):
2535        fields = line.split()
2536        if verbose and i%((n+10)/10)==0:
2537            print 'Processing row %d of %d' %(i, nrows)
2538
2539        elevation[i, :] = array([float(x) for x in fields])
2540
2541    fid.close()
2542
2543
2544
2545
2546
2547def ferret2sww(basename_in, basename_out = None,
2548               verbose = False,
2549               minlat = None, maxlat = None,
2550               minlon = None, maxlon = None,
2551               mint = None, maxt = None, mean_stage = 0,
2552               origin = None, zscale = 1,
2553               fail_on_NaN = True,
2554               NaN_filler = 0,
2555               elevation = None,
2556               inverted_bathymetry = True
2557               ): #FIXME: Bathymetry should be obtained
2558                                  #from MOST somehow.
2559                                  #Alternatively from elsewhere
2560                                  #or, as a last resort,
2561                                  #specified here.
2562                                  #The value of -100 will work
2563                                  #for the Wollongong tsunami
2564                                  #scenario but is very hacky
2565    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2566    sww format native to abstract_2d_finite_volumes.
2567
2568    Specify only basename_in and read files of the form
2569    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2570    relative height, x-velocity and y-velocity, respectively.
2571
2572    Also convert latitude and longitude to UTM. All coordinates are
2573    assumed to be given in the GDA94 datum.
2574
2575    min's and max's: If omitted - full extent is used.
2576    To include a value min may equal it, while max must exceed it.
2577    Lat and lon are assumed to be in decimal degrees
2578
2579    origin is a 3-tuple with geo referenced
2580    UTM coordinates (zone, easting, northing)
2581
2582    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2583    which means that longitude is the fastest
2584    varying dimension (row major order, so to speak)
2585
2586    ferret2sww uses grid points as vertices in a triangular grid
2587    counting vertices from lower left corner upwards, then right
2588    """
2589
2590    import os
2591    from Scientific.IO.NetCDF import NetCDFFile
2592    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2593    from Numeric import allclose, around
2594
2595    precision = Float
2596
2597    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2598
2599    if minlat != None:
2600        assert -90 < minlat < 90 , msg
2601    if maxlat != None:
2602        assert -90 < maxlat < 90 , msg
2603    if minlon != None:
2604        assert -180 < minlon < 180 , msg
2605    if maxlon != None:
2606        assert -180 < maxlon < 180 , msg
2607
2608
2609    #Get NetCDF data
2610    if verbose: print 'Reading files %s_*.nc' %basename_in
2611    #print "basename_in + '_ha.nc'",basename_in + '_ha.nc'
2612    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2613    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2614    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2615    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2616
2617    if basename_out is None:
2618        swwname = basename_in + '.sww'
2619    else:
2620        swwname = basename_out + '.sww'
2621
2622    # Get dimensions of file_h
2623    for dimension in file_h.dimensions.keys():
2624        if dimension[:3] == 'LON':
2625            dim_h_longitude = dimension
2626        if dimension[:3] == 'LAT':
2627            dim_h_latitude = dimension
2628        if dimension[:4] == 'TIME':
2629            dim_h_time = dimension
2630
2631    #print 'long:', dim_h_longitude
2632    #print 'lats:', dim_h_latitude
2633    #print 'times:', dim_h_time
2634
2635    times = file_h.variables[dim_h_time]
2636    latitudes = file_h.variables[dim_h_latitude]
2637    longitudes = file_h.variables[dim_h_longitude]
2638   
2639    # Get dimensions of file_e
2640    for dimension in file_e.dimensions.keys():
2641        if dimension[:3] == 'LON':
2642            dim_e_longitude = dimension
2643        if dimension[:3] == 'LAT':
2644            dim_e_latitude = dimension
2645
2646    # Get dimensions of file_u
2647    for dimension in file_u.dimensions.keys():
2648        if dimension[:3] == 'LON':
2649            dim_u_longitude = dimension
2650        if dimension[:3] == 'LAT':
2651            dim_u_latitude = dimension
2652        if dimension[:4] == 'TIME':
2653            dim_u_time = dimension
2654           
2655    # Get dimensions of file_v
2656    for dimension in file_v.dimensions.keys():
2657        if dimension[:3] == 'LON':
2658            dim_v_longitude = dimension
2659        if dimension[:3] == 'LAT':
2660            dim_v_latitude = dimension
2661        if dimension[:4] == 'TIME':
2662            dim_v_time = dimension
2663
2664
2665    #Precision used by MOST for lat/lon is 4 or 5 decimals
2666    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2667    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2668
2669    #Check that files are compatible
2670    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2671    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2672    assert allclose(latitudes, e_lat)
2673
2674    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2675    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2676    assert allclose(longitudes, e_lon)
2677
2678    if mint == None:
2679        jmin = 0
2680    else:
2681        jmin = searchsorted(times, mint)
2682
2683    if maxt == None:
2684        jmax=len(times)
2685    else:
2686        jmax = searchsorted(times, maxt)
2687
2688    if minlat == None:
2689        kmin=0
2690    else:
2691        kmin = searchsorted(latitudes, minlat)
2692
2693    if maxlat == None:
2694        kmax = len(latitudes)
2695    else:
2696        kmax = searchsorted(latitudes, maxlat)
2697
2698    if minlon == None:
2699        lmin=0
2700    else:
2701        lmin = searchsorted(longitudes, minlon)
2702
2703    if maxlon == None:
2704        lmax = len(longitudes)
2705    else:
2706        lmax = searchsorted(longitudes, maxlon)
2707
2708    #print ' j', jmin, jmax
2709    times = times[jmin:jmax]
2710    latitudes = latitudes[kmin:kmax]
2711    longitudes = longitudes[lmin:lmax]
2712
2713
2714    if verbose: print 'cropping'
2715    zname = 'ELEVATION'
2716
2717    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2718    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2719    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2720    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2721
2722    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2723    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2724    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2725    #        from Numeric import asarray
2726    #        elevations=elevations.tolist()
2727    #        elevations.reverse()
2728    #        elevations=asarray(elevations)
2729    #    else:
2730    #        from Numeric import asarray
2731    #        elevations=elevations.tolist()
2732    #        elevations.reverse()
2733    #        elevations=asarray(elevations)
2734    #        'print hmmm'
2735
2736
2737
2738    #Get missing values
2739    nan_ha = file_h.variables['HA'].missing_value[0]
2740    nan_ua = file_u.variables['UA'].missing_value[0]
2741    nan_va = file_v.variables['VA'].missing_value[0]
2742    if hasattr(file_e.variables[zname],'missing_value'):
2743        nan_e  = file_e.variables[zname].missing_value[0]
2744    else:
2745        nan_e = None
2746
2747    #Cleanup
2748    from Numeric import sometrue
2749
2750    missing = (amplitudes == nan_ha)
2751    if sometrue (missing):
2752        if fail_on_NaN:
2753            msg = 'NetCDFFile %s contains missing values'\
2754                  %(basename_in+'_ha.nc')
2755            raise DataMissingValuesError, msg
2756        else:
2757            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2758
2759    missing = (uspeed == nan_ua)
2760    if sometrue (missing):
2761        if fail_on_NaN:
2762            msg = 'NetCDFFile %s contains missing values'\
2763                  %(basename_in+'_ua.nc')
2764            raise DataMissingValuesError, msg
2765        else:
2766            uspeed = uspeed*(missing==0) + missing*NaN_filler
2767
2768    missing = (vspeed == nan_va)
2769    if sometrue (missing):
2770        if fail_on_NaN:
2771            msg = 'NetCDFFile %s contains missing values'\
2772                  %(basename_in+'_va.nc')
2773            raise DataMissingValuesError, msg
2774        else:
2775            vspeed = vspeed*(missing==0) + missing*NaN_filler
2776
2777
2778    missing = (elevations == nan_e)
2779    if sometrue (missing):
2780        if fail_on_NaN:
2781            msg = 'NetCDFFile %s contains missing values'\
2782                  %(basename_in+'_e.nc')
2783            raise DataMissingValuesError, msg
2784        else:
2785            elevations = elevations*(missing==0) + missing*NaN_filler
2786
2787    #######
2788
2789
2790
2791    number_of_times = times.shape[0]
2792    number_of_latitudes = latitudes.shape[0]
2793    number_of_longitudes = longitudes.shape[0]
2794
2795    assert amplitudes.shape[0] == number_of_times
2796    assert amplitudes.shape[1] == number_of_latitudes
2797    assert amplitudes.shape[2] == number_of_longitudes
2798
2799    if verbose:
2800        print '------------------------------------------------'
2801        print 'Statistics:'
2802        print '  Extent (lat/lon):'
2803        print '    lat in [%f, %f], len(lat) == %d'\
2804              %(min(latitudes.flat), max(latitudes.flat),
2805                len(latitudes.flat))
2806        print '    lon in [%f, %f], len(lon) == %d'\
2807              %(min(longitudes.flat), max(longitudes.flat),
2808                len(longitudes.flat))
2809        print '    t in [%f, %f], len(t) == %d'\
2810              %(min(times.flat), max(times.flat), len(times.flat))
2811
2812        q = amplitudes.flat
2813        name = 'Amplitudes (ha) [cm]'
2814        print '    %s in [%f, %f]' %(name, min(q), max(q))
2815
2816        q = uspeed.flat
2817        name = 'Speeds (ua) [cm/s]'
2818        print '    %s in [%f, %f]' %(name, min(q), max(q))
2819
2820        q = vspeed.flat
2821        name = 'Speeds (va) [cm/s]'
2822        print '    %s in [%f, %f]' %(name, min(q), max(q))
2823
2824        q = elevations.flat
2825        name = 'Elevations (e) [m]'
2826        print '    %s in [%f, %f]' %(name, min(q), max(q))
2827
2828
2829    #print number_of_latitudes, number_of_longitudes
2830    number_of_points = number_of_latitudes*number_of_longitudes
2831    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2832
2833
2834    file_h.close()
2835    file_u.close()
2836    file_v.close()
2837    file_e.close()
2838
2839
2840    # NetCDF file definition
2841    outfile = NetCDFFile(swwname, 'w')
2842
2843    #Create new file
2844    outfile.institution = 'Geoscience Australia'
2845    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2846                          %(basename_in + '_ha.nc',
2847                            basename_in + '_ua.nc',
2848                            basename_in + '_va.nc',
2849                            basename_in + '_e.nc')
2850
2851
2852    #For sww compatibility
2853    outfile.smoothing = 'Yes'
2854    outfile.order = 1
2855
2856    #Start time in seconds since the epoch (midnight 1/1/1970)
2857    outfile.starttime = starttime = times[0]
2858    times = times - starttime  #Store relative times
2859
2860    # dimension definitions
2861    outfile.createDimension('number_of_volumes', number_of_volumes)
2862
2863    outfile.createDimension('number_of_vertices', 3)
2864    outfile.createDimension('number_of_points', number_of_points)
2865
2866
2868    outfile.createDimension('number_of_timesteps', len(times))
2869
2870    # variable definitions
2871    outfile.createVariable('x', precision, ('number_of_points',))
2872    outfile.createVariable('y', precision, ('number_of_points',))
2873    outfile.createVariable('elevation', precision, ('number_of_points',))
2874
2875    #FIXME: Backwards compatibility
2876    outfile.createVariable('z', precision, ('number_of_points',))
2877    #################################
2878
2879    outfile.createVariable('volumes', Int, ('number_of_volumes',
2880                                            'number_of_vertices'))
2881
2882    outfile.createVariable('time', precision,
2883                           ('number_of_timesteps',))
2884
2885    outfile.createVariable('stage', precision,
2886                           ('number_of_timesteps',
2887                            'number_of_points'))
2888
2889    outfile.createVariable('xmomentum', precision,
2890                           ('number_of_timesteps',
2891                            'number_of_points'))
2892
2893    outfile.createVariable('ymomentum', precision,
2894                           ('number_of_timesteps',
2895                            'number_of_points'))
2896
2897
2898    #Store
2899    from anuga.coordinate_transforms.redfearn import redfearn
2900    x = zeros(number_of_points, Float)  #Easting
2901    y = zeros(number_of_points, Float)  #Northing
2902
2903
2904    if verbose: print 'Making triangular grid'
2905    #Check zone boundaries
2906    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2907
2908    vertices = {}
2909    i = 0
2910    for k, lat in enumerate(latitudes):       #Y direction
2911        for l, lon in enumerate(longitudes):  #X direction
2912
2913            vertices[l,k] = i
2914
2915            zone, easting, northing = redfearn(lat,lon)
2916
2917            msg = 'Zone boundary crossed at longitude = %s' %lon
2918            #assert zone == refzone, msg
2919            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2920            x[i] = easting
2921            y[i] = northing
2922            i += 1
2923
2924
2925    #Construct 2 triangles per 'rectangular' element
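    # For each grid cell the four surrounding vertices
    #
    #   v1 = (l, k+1)    v3 = (l+1, k+1)
    #   v2 = (l, k)      v4 = (l+1, k)
    #
    # are split along the v2-v3 diagonal into the upper triangle
    # [v1, v2, v3] and the lower triangle [v4, v3, v2].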
2926    volumes = []
2927    for l in range(number_of_longitudes-1):    #X direction
2928        for k in range(number_of_latitudes-1): #Y direction
2929            v1 = vertices[l,k+1]
2930            v2 = vertices[l,k]
2931            v3 = vertices[l+1,k+1]
2932            v4 = vertices[l+1,k]
2933
2934            volumes.append([v1,v2,v3]) #Upper element
2935            volumes.append([v4,v3,v2]) #Lower element
2936
2937    volumes = array(volumes)
2938
2939    if origin == None:
2940        zone = refzone
2941        xllcorner = min(x)
2942        yllcorner = min(y)
2943    else:
2944        zone = origin[0]
2945        xllcorner = origin[1]
2946        yllcorner = origin[2]
2947
2948
2949    outfile.xllcorner = xllcorner
2950    outfile.yllcorner = yllcorner
2951    outfile.zone = zone
2952
2953
2954    if elevation is not None:
2955        z = elevation
2956    else:
2957        if inverted_bathymetry:
2958            z = -1*elevations
2959        else:
2960            z = elevations
2961    #FIXME: z should be obtained from MOST and passed in here
2962
2963    from Numeric import resize
2964    z = resize(z,outfile.variables['z'][:].shape)
2965    outfile.variables['x'][:] = x - xllcorner
2966    outfile.variables['y'][:] = y - yllcorner
2967    outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
2968    outfile.variables['elevation'][:] = z
2969    outfile.variables['time'][:] = times   #Store time relative
2970    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
2971
2972
2973
2974    #Time stepping
2975    stage = outfile.variables['stage']
2976    xmomentum = outfile.variables['xmomentum']
2977    ymomentum = outfile.variables['ymomentum']
2978
2979    if verbose: print 'Converting quantities'
2980    n = len(times)
2981    for j in range(n):
2982        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2983        i = 0
2984        for k in range(number_of_latitudes):      #Y direction
2985            for l in range(number_of_longitudes): #X direction
2986                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2987                stage[j,i] = w
2988                h = w - z[i]
2989                xmomentum[j,i] = uspeed[j,k,l]/100*h
2990                ymomentum[j,i] = vspeed[j,k,l]/100*h
2991                i += 1
2992
2993    #outfile.close()
2994
2995    #FIXME: Refactor using code from file_function.statistics
2996    #Something like print swwstats(swwname)
2997    if verbose:
2998        x = outfile.variables['x'][:]
2999        y = outfile.variables['y'][:]
3000        times = outfile.variables['time'][:]
3001        print '------------------------------------------------'
3002        print 'Statistics of output file:'
3003        print '  Name: %s' %swwname
3004        print '  Reference:'
3005        print '    Lower left corner: [%f, %f]'\
3006              %(xllcorner, yllcorner)
3007        print '    Start time: %f' %starttime
3008        print '  Extent:'
3009        print '    x [m] in [%f, %f], len(x) == %d'\
3010              %(min(x.flat), max(x.flat), len(x.flat))
3011        print '    y [m] in [%f, %f], len(y) == %d'\
3012              %(min(y.flat), max(y.flat), len(y.flat))
3013        print '    t [s] in [%f, %f], len(t) == %d'\
3014              %(min(times), max(times), len(times))
3015        print '  Quantities [SI units]:'
3016        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
3017            q = outfile.variables[name][:].flat
3018            print '    %s in [%f, %f]' %(name, min(q), max(q))
3019
3020
3021
3022    outfile.close()
3023
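# Example usage (a sketch; 'most_output' is a hypothetical basename, so
# most_output_ha.nc, most_output_ua.nc, most_output_va.nc and
# most_output_e.nc must all exist):
#
#     ferret2sww('most_output',
#                basename_out='most_boundary',
#                minlat=-21.0, maxlat=-20.0,
#                minlon=114.0, maxlon=115.0,
#                mean_stage=0.0,
#                verbose=True)
#     # -> writes most_boundary.sww for use as a boundary condition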
3024
3025
3026
3027
3028def timefile2netcdf(filename, quantity_names = None):
3029    """Template for converting typical text files with time series to
3030    NetCDF tms file.
3031
3032
3033    The file format is assumed to be two fields separated by a comma:
3034
3035        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3036
3037    E.g
3038
3039      31/08/04 00:00:00, 1.328223 0 0
3040      31/08/04 00:15:00, 1.292912 0 0
3041
3042    will provide a time dependent function f(t) with three attributes
3043
3044    filename is assumed to be the rootname with extensions .txt (input) and .tms (output)
3045    """
3046
3047    import time, calendar
3048    from Numeric import array
3049    from anuga.config import time_format
3050    from anuga.utilities.numerical_tools import ensure_numeric
3051
3052
3053
3054    fid = open(filename + '.txt')
3055    line = fid.readline()
3056    fid.close()
3057
3058    fields = line.split(',')
3059    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3060    assert len(fields) == 2, msg
3061
3062    try:
3063        starttime = calendar.timegm(time.strptime(fields[0], time_format))
3064    except ValueError:
3065        msg = 'First field in file %s must be' %filename
3066        msg += ' date-time with format %s.\n' %time_format
3067        msg += 'I got %s instead.' %fields[0]
3068        raise DataTimeError, msg
3069
3070
3071    #Split values
3072    values = []
3073    for value in fields[1].split():
3074        values.append(float(value))
3075
3076    q = ensure_numeric(values)
3077
3078    msg = 'ERROR: File must contain at least one independent value'
3079    assert len(q.shape) == 1, msg
3080
3081
3082
3083    #Read times proper
3084    from Numeric import zeros, Float, alltrue
3085    from anuga.config import time_format
3086    import time, calendar
3087
3088    fid = open(filename + '.txt')
3089    lines = fid.readlines()
3090    fid.close()
3091
3092    N = len(lines)
3093    d = len(q)
3094
3095    T = zeros(N, Float)       #Time
3096    Q = zeros((N, d), Float)  #Values
3097
3098    for i, line in enumerate(lines):
3099        fields = line.split(',')
3100        realtime = calendar.timegm(time.strptime(fields[0], time_format))
3101
3102        T[i] = realtime - starttime
3103
3104        for j, value in enumerate(fields[1].split()):
3105            Q[i, j] = float(value)
3106
3107    msg = 'File %s must list time as a monotonically ' %filename
3108    msg += 'increasing sequence'
3109    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3110
3111
3112    #Create NetCDF file
3113    from Scientific.IO.NetCDF import NetCDFFile
3114
3115    fid = NetCDFFile(filename + '.tms', 'w')
3116
3117
3118    fid.institution = 'Geoscience Australia'
3119    fid.description = 'Time series'
3120
3121
3122    #Reference point
3123    #Start time in seconds since the epoch (midnight 1/1/1970)
3124    #FIXME: Use Georef
3125    fid.starttime = starttime
3126
3127
3128    # dimension definitions
3129    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3130    #fid.createDimension('number_of_vertices', 3)
3131
3132
3133    fid.createDimension('number_of_timesteps', len(T))
3134
3135    fid.createVariable('time', Float, ('number_of_timesteps',))
3136
3137    fid.variables['time'][:] = T
3138
3139    for i in range(Q.shape[1]):
3140        try:
3141            name = quantity_names[i]
3142        except (TypeError, IndexError): #quantity_names is None or too short
3143            name = 'Attribute%d'%i
3144
3145        fid.createVariable(name, Float, ('number_of_timesteps',))
3146        fid.variables[name][:] = Q[:,i]
3147
3148    fid.close()
3149
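# Example usage (a sketch; 'gauge' is a hypothetical rootname, so gauge.txt
# must hold lines like '31/08/04 00:00:00, 1.328223 0 0'):
#
#     timefile2netcdf('gauge',
#                     quantity_names=['stage', 'xmomentum', 'ymomentum'])
#     # -> writes gauge.tms with time plus one variable per attribute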
3150
3151def extent_sww(file_name):
3152    """
3153    Read in an sww file.
3154
3155    Input:
3156    file_name - the sww file
3157
3158    Output:
3159    A list describing the extent of the file's coordinates and stage:
3160    [min(x), max(x), min(y), max(y), min(stage), max(stage)]
3161    """
3165
3166
3167    from Scientific.IO.NetCDF import NetCDFFile
3168
3169    #Check contents
3170    #Get NetCDF
3171    fid = NetCDFFile(file_name, 'r')
3172
3173    # Get the variables
3174    x = fid.variables['x'][:]
3175    y = fid.variables['y'][:]
3176    stage = fid.variables['stage'][:]
3177    #print "stage",stage
3178    #print "stage.shap",stage.shape
3179    #print "min(stage.flat), max(stage.flat)", min(stage.flat), max(stage.flat)
3180    #print "min(stage)",min(stage)
3181
3182    fid.close()
3183
3184    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3185
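# Example usage (a sketch; the filename is hypothetical):
#
#     xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('myrun.sww')
#     print 'x in [%f, %f], stage in [%f, %f]' %(xmin, xmax, stage_min, stage_max)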
3186
3187def sww2domain(filename, boundary=None, t=None,
3188               fail_if_NaN=True, NaN_filler=0,
3189               verbose=False, very_verbose=False):
3190    """
3191    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3192
3193    Boundary is not recommended if domain.smooth is not selected, as it
3194    uses unique coordinates, but not unique boundaries. This means that
3195    the boundary file will not be compatible with the coordinates, and will
3196    give a different final boundary, or crash.
3197    """
3198    NaN = 9.969209968386869e+036
3199    #Initialise NaN to the default NetCDF fill value for doubles
3200
3201    from Scientific.IO.NetCDF import NetCDFFile
3202    from shallow_water import Domain
3203    from Numeric import asarray, transpose, resize
3204
3205    if verbose: print 'Reading from ', filename
3206    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3207    time = fid.variables['time']       #Timesteps
3208    if t is None:
3209        t = time[-1]
3210    time_interp = get_time_interp(time,t)
3211
3212    # Get the variables as Numeric arrays
3213    x = fid.variables['x'][:]             #x-coordinates of vertices
3214    y = fid.variables['y'][:]             #y-coordinates of vertices
3215    elevation = fid.variables['elevation']     #Elevation
3216    stage = fid.variables['stage']     #Water level
3217    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3218    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3219
3220    starttime = fid.starttime[0]
3221    volumes = fid.variables['volumes'][:] #Connectivity
3222    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3223
3224    conserved_quantities = []
3225    interpolated_quantities = {}
3226    other_quantities = []
3227
3228    # get geo_reference
3229    #sww files don't have to have a geo_ref
3230    try:
3231        geo_reference = Geo_reference(NetCDFObject=fid)
3232    except: #AttributeError, e:
3233        geo_reference = None
3234
3235    if verbose: print '    getting quantities'
3236    for quantity in fid.variables.keys():
3237        dimensions = fid.variables[quantity].dimensions
3238        if 'number_of_timesteps' in dimensions:
3239            conserved_quantities.append(quantity)
3240            interpolated_quantities[quantity]=\
3241                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3242        else: other_quantities.append(quantity)
3243
3244    other_quantities.remove('x')
3245    other_quantities.remove('y')
3246    other_quantities.remove('z')
3247    other_quantities.remove('volumes')
3248
3249    conserved_quantities.remove('time')
3250
3251    if verbose: print '    building domain'
3252    #    From domain.Domain:
3253    #    domain = Domain(coordinates, volumes,\
3254    #                    conserved_quantities = conserved_quantities,\
3255    #                    other_quantities = other_quantities,zone=zone,\
3256    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3257
3258    #   From shallow_water.Domain:
3259    coordinates=coordinates.tolist()
3260    volumes=volumes.tolist()
3261    #FIXME:should this be in mesh?(peter row)
3262    if fid.smoothing == 'Yes': unique = False
3263    else: unique = True
3264    if unique:
3265        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3266
3267
3268    try:
3269        domain = Domain(coordinates, volumes, boundary)
3270    except AssertionError, e:
3271        fid.close()
3272        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3273        raise DataDomainError, msg
3274
3275    if not boundary is None:
3276        domain.boundary = boundary
3277
3278    domain.geo_reference = geo_reference
3279
3280    domain.starttime=float(starttime)+float(t)
3281    domain.time=0.0
3282
3283    for quantity in other_quantities:
3284        try:
3285            NaN = fid.variables[quantity].missing_value
3286        except:
3287            pass #quantity has no missing_value number
3288        X = fid.variables[quantity][:]
3289        if very_verbose:
3290            print '       ',quantity
3291            print '        NaN =',NaN
3292            print '        max(X)'
3293            print '       ',max(X)
3294            print '        max(X)==NaN'
3295            print '       ',max(X)==NaN
3296            print ''
3297        if (max(X)==NaN) or (min(X)==NaN):
3298            if fail_if_NaN:
3299                msg = 'quantity "%s" contains no_data entry'%quantity
3300                raise DataMissingValuesError, msg
3301            else:
3302                data = (X != NaN)
3303                X = (X*data)+(data==0)*NaN_filler
3304        if unique:
3305            X = resize(X,(len(X)/3,3))
3306        domain.set_quantity(quantity,X)
3307    #
3308    for quantity in conserved_quantities:
3309        try:
3310            NaN = fid.variables[quantity].missing_value
3311        except AttributeError:
3312            NaN = None #quantity has no missing_value number
3313        X = interpolated_quantities[quantity]
3314        if very_verbose:
3315            print '       ',quantity
3316            print '        NaN =',NaN
3317            print '        max(X)'
3318            print '       ',max(X)
3319            print '        max(X)==NaN'
3320            print '       ',max(X)==NaN
3321            print ''
3322        if NaN is not None and ((max(X)==NaN) or (min(X)==NaN)):
3323            if fail_if_NaN:
3324                msg = 'quantity "%s" contains no_data entry'%quantity
3325                raise DataMissingValuesError, msg
3326            else:
3327                data = (X!=NaN)
3328                X = (X*data)+(data==0)*NaN_filler
3329        if unique:
3330            X = resize(X,(X.shape[0]/3,3))
3331        domain.set_quantity(quantity,X)
3332
3333    fid.close()
3334    return domain
3335
3336def interpolated_quantity(saved_quantity,time_interp):
3337
3338    #given an index and ratio, interpolate quantity with respect to time.
3339    index,ratio = time_interp
3340    Q = saved_quantity
3341    if ratio > 0:
3342        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3343    else:
3344        q = Q[index]
3345    #Return vector of interpolated values
3346    return q
3347
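#An illustrative sketch (not part of the original module): with a quantity
#saved at three timesteps, an (index, ratio) pair of (1, 0.5) interpolates
#halfway between timesteps 1 and 2, while a ratio of 0 returns the saved
#timestep unchanged.
def _example_interpolated_quantity():
    Q = [0.0, 10.0, 40.0] #quantity saved at three timesteps
    assert interpolated_quantity(Q, (1, 0.5)) == 25.0 #halfway between Q[1] and Q[2]
    assert interpolated_quantity(Q, (1, 0)) == 10.0 #ratio 0 -> exact timestep
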
3348def get_time_interp(time,t=None):
3349    #Finds the ratio and index for time interpolation.
3350    #It is borrowed from previous abstract_2d_finite_volumes code.
3351    if t is None:
3352        t=time[-1]
3353        index = -1
3354        ratio = 0.
3355    else:
3356        T = time
3357        tau = t
3358        index=0
3359        msg = 'Time interval derived from file %s [%s:%s]'\
3360            %('FIXMEfilename', T[0], T[-1])
3361        msg += ' does not match model time: %s' %tau
3362        if tau < time[0]: raise DataTimeError, msg
3363        if tau > time[-1]: raise DataTimeError, msg
3364        while tau > time[index]: index += 1
3365        while tau < time[index]: index -= 1
3366        if tau == time[index]:
3367            #Protect against case where tau == time[-1] (last time)
3368            # - also works in general when tau == time[i]
3369            ratio = 0
3370        else:
3371            #t is now between index and index+1
3372            ratio = (tau - time[index])/(time[index+1] - time[index])
3373    return (index,ratio)
3374
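#An illustrative sketch (not part of the original module): for a uniform
#time vector, get_time_interp returns the index of the bracketing interval
#and the fractional position within it.
def _example_get_time_interp():
    time = [0.0, 10.0, 20.0]
    assert get_time_interp(time) == (-1, 0.) #default: last timestep
    assert get_time_interp(time, 10.0) == (1, 0) #exact hit on a timestep
    assert get_time_interp(time, 15.0) == (1, 0.5) #halfway through [10, 20]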
3375
3376def weed(coordinates,volumes,boundary = None):
3377    if hasattr(coordinates, 'tolist'): #Numeric array; type(x)=='array' never held
3378        coordinates = coordinates.tolist()
3379    if hasattr(volumes, 'tolist'):
3380        volumes = volumes.tolist()
3381
3382    unique = False
3383    point_dict = {}
3384    same_point = {}
3385    for i in range(len(coordinates)):
3386        point = tuple(coordinates[i])
3387        if point_dict.has_key(point):
3388            unique = True
3389            same_point[i]=point
3390            #to change all point i references to point j
3391        else:
3392            point_dict[point]=i
3393            same_point[i]=point
3394
3395    coordinates = []
3396    i = 0
3397    for point in point_dict.keys():
3398        point = tuple(point)
3399        coordinates.append(list(point))
3400        point_dict[point]=i
3401        i+=1
3402
3403
3404    for volume in volumes:
3405        for i in range(len(volume)):
3406            index = volume[i]
3407            if index>-1:
3408                volume[i]=point_dict[same_point[index]]
3409
3410    new_boundary = {}
3411    if boundary is not None:
3412        for segment in boundary.keys():
3413            point0 = point_dict[same_point[segment[0]]]
3414            point1 = point_dict[same_point[segment[1]]]
3415            label = boundary[segment]
3416            #FIXME should the boundary attributes be concatenated
3417            #('exterior, pond') or replaced ('pond')? (peter row)
3418
3419            if new_boundary.has_key((point0,point1)):
3420                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3421                                              #+','+label
3422
3423            elif new_boundary.has_key((point1,point0)):
3424                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3425                                              #+','+label
3426            else: new_boundary[(point0,point1)]=label
3427
3428        boundary = new_boundary
3429
3430    return coordinates,volumes,boundary
3431
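#An illustrative sketch (not part of the original module): weed() merges
#duplicate vertices (as stored by smoothed sww files) and re-indexes the
#triangle list to the surviving points.
def _example_weed():
    coordinates = [[0.,0.], [1.,0.], [0.,1.],
                   [1.,0.], [0.,1.], [1.,1.]] #vertices 3 and 4 duplicate 1 and 2
    volumes = [[0,1,2], [3,4,5]]
    coordinates, volumes, boundary = weed(coordinates, volumes)
    assert len(coordinates) == 4 #duplicates removed
    for volume in volumes:
        for index in volume:
            assert 0 <= index < 4 #indices remapped to the weeded points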
3432
3433def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3434                 verbose=False):
3435    """Read Digitial Elevation model from the following NetCDF format (.dem)
3436
3437    Example:
3438
3439    ncols         3121
3440    nrows         1800
3441    xllcorner     722000
3442    yllcorner     5893000
3443    cellsize      25
3444    NODATA_value  -9999
3445    138.3698 137.4194 136.5062 135.5558 ..........
3446
3447    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3448    """
3449
3450    import os
3451    from Scientific.IO.NetCDF import NetCDFFile
3452    from Numeric import Float, zeros, sum, reshape, equal
3453
3454    root = basename_in
3455    inname = root + '.dem'
3456
3457    #Open existing netcdf file to read
3458    infile = NetCDFFile(inname, 'r')
3459    if verbose: print 'Reading DEM from %s' %inname
3460
3461    #Read metadata
3462    ncols = infile.ncols[0]
3463    nrows = infile.nrows[0]
3464    xllcorner = infile.xllcorner[0]
3465    yllcorner = infile.yllcorner[0]
3466    cellsize = infile.cellsize[0]
3467    NODATA_value = infile.NODATA_value[0]
3468    zone = infile.zone[0]
3469    false_easting = infile.false_easting[0]
3470    false_northing = infile.false_northing[0]
3471    projection = infile.projection
3472    datum = infile.datum
3473    units = infile.units
3474
3475    dem_elevation = infile.variables['elevation']
3476
3477    #Get output file name
3478    if basename_out is None:
3479        outname = root + '_' + repr(cellsize_new) + '.dem'
3480    else:
3481        outname = basename_out + '.dem'
3482
3483    if verbose: print 'Write decimated NetCDF file to %s' %outname
3484
3485    #Determine some dimensions for decimated grid
3486    (nrows_stencil, ncols_stencil) = stencil.shape
3487    x_offset = ncols_stencil / 2
3488    y_offset = nrows_stencil / 2
3489    cellsize_ratio = int(cellsize_new / cellsize)
3490    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3491    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3492
3493    #Open netcdf file for output
3494    outfile = NetCDFFile(outname, 'w')
3495
3496    #Create new file
3497    outfile.institution = 'Geoscience Australia'
3498    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3499                           'of spatial point data'
3500    #Georeferencing
3501    outfile.zone = zone
3502    outfile.projection = projection
3503    outfile.datum = datum
3504    outfile.units = units
3505
3506    outfile.cellsize = cellsize_new
3507    outfile.NODATA_value = NODATA_value
3508    outfile.false_easting = false_easting
3509    outfile.false_northing = false_northing
3510
3511    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3512    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3513    outfile.ncols = ncols_new
3514    outfile.nrows = nrows_new
3515
3516    # dimension definition
3517    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3518
3519    # variable definition
3520    outfile.createVariable('elevation', Float, ('number_of_points',))
3521
3522    # Get handle to the variable
3523    elevation = outfile.variables['elevation']
3524
3525    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3526
3527    #Store data
3528    global_index = 0
3529    for i in range(nrows_new):
3530        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3531        lower_index = global_index
3532        telev =  zeros(ncols_new, Float)
3533        local_index = 0
3534        trow = i * cellsize_ratio
3535
3536        for j in range(ncols_new):
3537            tcol = j * cellsize_ratio
3538            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3539
3540            #if dem contains 1 or more NODATA_values set value in
3541            #decimated dem to NODATA_value, else compute decimated
3542            #value using stencil
3543            if sum(sum(equal(tmp, NODATA_value))) > 0:
3544                telev[local_index] = NODATA_value
3545            else:
3546                telev[local_index] = sum(sum(tmp * stencil))
3547
3548            global_index += 1
3549            local_index += 1
3550
3551        upper_index = global_index
3552
3553        elevation[lower_index:upper_index] = telev
3554
3555    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3556
3557    infile.close()
3558    outfile.close()
3559
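#An illustrative sketch (not part of the original module): a 3x3 averaging
#stencil for decimate_dem. The weights should sum to 1 so decimated cells
#stay on the original elevation scale ('my_dem' is a hypothetical basename).
def _example_decimate_dem_stencil():
    from Numeric import ones, Float
    stencil = ones((3, 3), Float) / 9.0 #equal-weight average of each 3x3 block
    #decimate_dem('my_dem', stencil, cellsize_new=100)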
3560
3561
3562
3563def tsh2sww(filename, verbose=False): #test_tsh2sww
3564    """
3565    to check if a tsh/msh file 'looks' good.
3566    """
3567
3568    from shallow_water import Domain
3569    from anuga.abstract_2d_finite_volumes.pmesh2domain import pmesh_to_domain_instance
3570    import time, os
3571    #from data_manager import get_dataobject
3572    from os import sep, path
3573    from anuga.utilities.numerical_tools import mean
3574
3575    if verbose: print 'Creating domain from', filename
3576    domain = pmesh_to_domain_instance(filename, Domain)
3577    if verbose: print "Number of triangles = ", len(domain)
3578
3579    domain.smooth = True
3580    domain.format = 'sww'   #Native netcdf visualisation format
3581    file_path, filename = path.split(filename)
3582    filename, ext = path.splitext(filename)
3583    domain.set_name(filename)   
3584    domain.reduction = mean
3585    if verbose: print "file_path", file_path
3586    if file_path == "": file_path = "."
3587    domain.set_datadir(file_path)
3588
3589    if verbose:
3590        print "Output written to " + domain.get_datadir() + sep + \
3591              domain.get_name() + "." + domain.format
3592    sww = get_dataobject(domain)
3593    sww.store_connectivity()
3594    sww.store_timestep('stage')
3595
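#An illustrative sketch (not part of the original module): sanity-check a
#mesh by converting it to sww and viewing the result ('my_mesh.tsh' is a
#hypothetical filename).
def _example_tsh2sww():
    tsh2sww('my_mesh.tsh', verbose=True) #writes my_mesh.sww next to the mesh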
3596
3597def asc_csiro2sww(bath_dir,
3598                  elevation_dir,
3599                  ucur_dir,
3600                  vcur_dir,
3601                  sww_file,
3602                  minlat = None, maxlat = None,
3603                  minlon = None, maxlon = None,
3604                  zscale=1,
3605                  mean_stage = 0,
3606                  fail_on_NaN = True,
3607                  elevation_NaN_filler = 0,
3608                  bath_prefix='ba',
3609                  elevation_prefix='el',
3610                  verbose=False):
3611    """
3612    Produce an sww boundary file from ESRI ASCII data from CSIRO.
3613
3614    Also convert latitude and longitude to UTM. All coordinates are
3615    assumed to be given in the GDA94 datum.
3616
3617    Assume:
3618    All files are in ESRI ASCII format
3619
3620    4 types of information
3621    bathymetry
3622    elevation
3623    u velocity
3624    v velocity
3625
3626    Assumptions
3627    The metadata of all the files is the same
3628    Each type is in a separate directory
3629    One bath file with extension .000
3630    The time period is less than 24 hrs and uniform.
3631    """
3632    from Scientific.IO.NetCDF import NetCDFFile
3633
3634    from anuga.coordinate_transforms.redfearn import redfearn
3635
3636    precision = Float # So if we want to change the precision it's done here
3637
3638    # go in to the bath dir and load the only file,
3639    bath_files = os.listdir(bath_dir)
3640    #print "bath_files",bath_files
3641
3642    #fixme: make more general?
3643    bath_file = bath_files[0]
3644    bath_dir_file =  bath_dir + os.sep + bath_file
3645    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3646    #print "bath_metadata",bath_metadata
3647    #print "bath_grid",bath_grid
3648
3649    #Use the date.time of the bath file as a basis for
3650    #the start time for other files
3651    base_start = bath_file[-12:]
3652
3653    #go into the elevation dir and load the 000 file
3654    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3655                         + base_start
3656    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
3657    #print "elevation_dir_file",elevation_dir_file
3658    #print "elevation_grid", elevation_grid
3659
3660    elevation_files = os.listdir(elevation_dir)
3661    ucur_files = os.listdir(ucur_dir)
3662    vcur_files = os.listdir(vcur_dir)
3663
3664    # the first elevation file should be the
3665    # file with the same base name as the bath data
3666    #print "elevation_files[0]",elevation_files[0]
3667    #print "'el' + base_start",'el' + base_start
3668    assert elevation_files[0] == 'el' + base_start
3669
3670    #assert bath_metadata == elevation_metadata
3671
3672
3673
3674    number_of_latitudes = bath_grid.shape[0]
3675    number_of_longitudes = bath_grid.shape[1]
3676    #number_of_times = len(os.listdir(elevation_dir))
3677    #number_of_points = number_of_latitudes*number_of_longitudes
3678    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3679
3680    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3681                  for x in range(number_of_longitudes)]
3682    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3683                 for y in range(number_of_latitudes)]
3684
 
3685    # reverse order of lat, so the first lat represents the first grid row
3686    latitudes.reverse()
3687
3688    #print "latitudes - before _get_min_max_indexes",latitudes
3689    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
3690                                                 minlat=minlat, maxlat=maxlat,
3691                                                 minlon=minlon, maxlon=maxlon)
3692
3693
3694    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3695    #print "bath_grid",bath_grid
3696    latitudes = latitudes[kmin:kmax]
3697    longitudes = longitudes[lmin:lmax]
3698    number_of_latitudes = len(latitudes)
3699    number_of_longitudes = len(longitudes)
3700    number_of_times = len(os.listdir(elevation_dir))
3701    number_of_points = number_of_latitudes*number_of_longitudes
3702    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3703    #print "number_of_points",number_of_points
3704
3705    #Work out the times
3706    if len(elevation_files) > 1:
3707        # Assume: The time period is less than 24hrs.
3708        time_period = (int(elevation_files[1][-3:]) - \
3709                      int(elevation_files[0][-3:]))*60*60
3710        times = [x*time_period for x in range(len(elevation_files))]
3711    else:
3712        times = [0.0]
3713    #print "times", times
3714    #print "number_of_latitudes", number_of_latitudes
3715    #print "number_of_longitudes", number_of_longitudes
3716    #print "number_of_times", number_of_times
3717    #print "latitudes", latitudes
3718    #print "longitudes", longitudes
3719
3720
3721    if verbose:
3722        print '------------------------------------------------'
3723        print 'Statistics:'
3724        print '  Extent (lat/lon):'
3725        print '    lat in [%f, %f], len(lat) == %d'\
3726              %(min(latitudes), max(latitudes),
3727                len(latitudes))
3728        print '    lon in [%f, %f], len(lon) == %d'\
3729              %(min(longitudes), max(longitudes),
3730                len(longitudes))
3731        print '    t in [%f, %f], len(t) == %d'\
3732              %(min(times), max(times), len(times))
3733
3734    ######### WRITE THE SWW FILE #############
3735    # NetCDF file definition
3736    outfile = NetCDFFile(sww_file, 'w')
3737
3738    #Create new file
3739    outfile.institution = 'Geoscience Australia'
3740    outfile.description = 'Converted from XXX'
3741
3742
3743    #For sww compatibility
3744    outfile.smoothing = 'Yes'
3745    outfile.order = 1
3746
3747    #Start time in seconds since the epoch (midnight 1/1/1970)
3748    outfile.starttime = starttime = times[0]
3749
3750
3751    # dimension definitions
3752    outfile.createDimension('number_of_volumes', number_of_volumes)
3753
3754    outfile.createDimension('number_of_vertices', 3)
3755    outfile.createDimension('number_of_points', number_of_points)
3756    outfile.createDimension('number_of_timesteps', number_of_times)
3757
3758    # variable definitions
3759    outfile.createVariable('x', precision, ('number_of_points',))
3760    outfile.createVariable('y', precision, ('number_of_points',))
3761    outfile.createVariable('elevation', precision, ('number_of_points',))
3762
3763    #FIXME: Backwards compatibility
3764    outfile.createVariable('z', precision, ('number_of_points',))
3765    #################################
3766
3767    outfile.createVariable('volumes', Int, ('number_of_volumes',
3768                                            'number_of_vertices'))
3769
3770    outfile.createVariable('time', precision,
3771                           ('number_of_timesteps',))
3772
3773    outfile.createVariable('stage', precision,
3774                           ('number_of_timesteps',
3775                            'number_of_points'))
3776
3777    outfile.createVariable('xmomentum', precision,
3778                           ('number_of_timesteps',
3779                            'number_of_points'))
3780
3781    outfile.createVariable('ymomentum', precision,
3782                           ('number_of_timesteps',
3783                            'number_of_points'))
3784
3785    #Store (redfearn is already imported above)
3787    x = zeros(number_of_points, Float)  #Easting
3788    y = zeros(number_of_points, Float)  #Northing
3789
3790    if verbose: print 'Making triangular grid'
3791    #Get zone of 1st point.
3792    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3793
3794    vertices = {}
3795    i = 0
3796    for k, lat in enumerate(latitudes):
3797        for l, lon in enumerate(longitudes):
3798
3799            vertices[l,k] = i
3800
3801            zone, easting, northing = redfearn(lat,lon)
3802
3803            msg = 'Zone boundary crossed at longitude = %f' %lon
3804            #assert zone == refzone, msg
3805            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3806            x[i] = easting
3807            y[i] = northing
3808            i += 1
3809
3810
3811    #Construct 2 triangles per 'rectangular' element
3812    volumes = []
3813    for l in range(number_of_longitudes-1):    #X direction
3814        for k in range(number_of_latitudes-1): #Y direction
3815            v1 = vertices[l,k+1]
3816            v2 = vertices[l,k]
3817            v3 = vertices[l+1,k+1]
3818            v4 = vertices[l+1,k]
3819
3820            #Note, this is different to the ferret2sww code
3821            #since the order of the lats is reversed.
3822            volumes.append([v1,v3,v2]) #Upper element
3823            volumes.append([v4,v2,v3]) #Lower element
3824
3825    volumes = array(volumes)
3826
3827    geo_ref = Geo_reference(refzone,min(x),min(y))
3828    geo_ref.write_NetCDF(outfile)
3829
3830    # This will put the geo ref in the middle
3831    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3832
3833
3834    if verbose:
3835        print '------------------------------------------------'
3836        print 'More Statistics:'
3837        print '  Extent (x/y):'
3838        print '    x in [%f, %f], len(x) == %d'\
3839              %(min(x), max(x),
3840                len(x))
3841        print '    y in [%f, %f], len(y) == %d'\
3842              %(min(y), max(y),
3843                len(y))
3844        print 'geo_ref: ',geo_ref
3845
3846    z = resize(bath_grid,outfile.variables['z'][:].shape)
3847    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3848    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3849    outfile.variables['z'][:] = z
3850    outfile.variables['elevation'][:] = z  #FIXME HACK
3851    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3852
3853    # do this to create an ok sww file.
3854    #outfile.variables['time'][:] = [0]   #Store time relative
3855    #outfile.variables['stage'] = z
3856    # put the next line up in the code after outfile.order = 1
3857    #number_of_times = 1
3858
3859    stage = outfile.variables['stage']
3860    xmomentum = outfile.variables['xmomentum']
3861    ymomentum = outfile.variables['ymomentum']
3862
3863    outfile.variables['time'][:] = times   #Store time relative
3864
3865    if verbose: print 'Converting quantities'
3866    n = number_of_times
3867    for j in range(number_of_times):
3868        # load in files
3869        elevation_meta, elevation_grid = \
3870           _read_asc(elevation_dir + os.sep + elevation_files[j])
3871
3872        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3873        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3874
3875        #print "elevation_grid",elevation_grid
3876        #cut matrix to desired size
3877        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3878        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3879        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3880        #print "elevation_grid",elevation_grid
3881        # handle missing values
3882        missing = (elevation_grid == elevation_meta['NODATA_value'])
3883        if sometrue(missing):
3884            if fail_on_NaN:
3885                msg = 'File %s contains missing values'\
3886                      %(elevation_files[j])
3887                raise DataMissingValuesError, msg
3888            else:
3889                elevation_grid = elevation_grid*(missing==0) + \
3890                                 missing*elevation_NaN_filler
3891
3892
3893        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3894        i = 0
3895        for k in range(number_of_latitudes):      #Y direction
3896            for l in range(number_of_longitudes): #X direction
3897                w = zscale*elevation_grid[k,l] + mean_stage
3898                stage[j,i] = w
3899                h = w - z[i]
3900                xmomentum[j,i] = u_momentum_grid[k,l]*h
3901                ymomentum[j,i] = v_momentum_grid[k,l]*h
3902                i += 1
3903    outfile.close()
3904
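#An illustrative sketch (not part of the original module): each of the four
#input directories holds one ESRI ASCII grid per timestep and the lat/lon
#window is optional (all names here are hypothetical).
def _example_asc_csiro2sww():
    asc_csiro2sww('bath_dir', 'elevation_dir', 'ucur_dir', 'vcur_dir',
                  'boundary.sww',
                  minlat=-38.0, maxlat=-35.0,
                  minlon=148.0, maxlon=151.0,
                  verbose=True)
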
3905def _get_min_max_indexes(latitudes,longitudes,
3906                        minlat=None, maxlat=None,
3907                        minlon=None, maxlon=None):
3908    """
3909    return max, min indexes (for slicing) of the lat and long arrays to cover the area
3910    specified with min/max lat/long
3911
3912    Think of the latitudes and longitudes describing a 2d surface.
3913    The area returned is, if possible, just big enough to cover the
3914    inputed max/min area. (This will not be possible if the max/min area
3915    has a section outside of the latitudes/longitudes area.)
3916
3917    assume latitudess & longitudes are sorted,
3918    long - from low to high (west to east, eg 148 - 151)
3919    lat - from high to low (north to south, eg -35 - -38)
3920    """
3921
3922    # reverse order of lat, so it's in ascending order
3923    latitudes.reverse()
3924    largest_lat_index = len(latitudes)-1
3925    #Cut out a smaller extent.
3926    if minlat is None:
3927        lat_min_index = 0
3928    else:
3929        lat_min_index = searchsorted(latitudes, minlat)-1
3930        if lat_min_index < 0:
3931            lat_min_index = 0
3932
3933
3934    if maxlat is None:
3935        lat_max_index = largest_lat_index #len(latitudes)
3936    else:
3937        lat_max_index = searchsorted(latitudes, maxlat)
3938        if lat_max_index > largest_lat_index:
3939            lat_max_index = largest_lat_index
3940
3941    if minlon is None:
3942        lon_min_index = 0
3943    else:
3944        lon_min_index = searchsorted(longitudes, minlon)-1
3945        if lon_min_index < 0:
3946            lon_min_index = 0
3947
3948    if maxlon is None:
3949        lon_max_index = len(longitudes)
3950    else:
3951        lon_max_index = searchsorted(longitudes, maxlon)
3952
3953    #Take into account that the latitude list was reversed
3954    latitudes.reverse() # the list was reversed in place; restore the caller's order
3955    lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3956                                   largest_lat_index - lat_min_index
3957    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3958    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3959
3960    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3961
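#An illustrative sketch (not part of the original module): the returned
#slices are just big enough to cover the requested window, so the outermost
#grid lines may lie outside it.
def _example_get_min_max_indexes():
    latitudes = [-35.0, -36.0, -37.0, -38.0] #north to south, as assumed
    longitudes = [148.0, 149.0, 150.0, 151.0] #west to east, as assumed
    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes, longitudes,
                                                  minlat=-36.5, maxlat=-35.5,
                                                  minlon=148.5, maxlon=150.5)
    sub_lats = latitudes[kmin:kmax]
    sub_lons = longitudes[lmin:lmax]
    assert min(sub_lats) <= -36.5 and max(sub_lats) >= -35.5 #window covered
    assert min(sub_lons) <= 148.5 and max(sub_lons) >= 150.5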
3962
3963def _read_asc(filename, verbose=False):
3964    """Read esri file from the following ASCII format (.asc)
3965
3966    Example:
3967
3968    ncols         3121
3969    nrows         1800
3970    xllcorner     722000
3971    yllcorner     5893000
3972    cellsize      25
3973    NODATA_value  -9999
3974    138.3698 137.4194 136.5062 135.5558 ..........
3975
3976    """
3977
3978    datafile = open(filename)
3979
3980    if verbose: print 'Reading DEM from %s' %(filename)
3981    lines = datafile.readlines()
3982    datafile.close()
3983
3984    if verbose: print 'Got', len(lines), ' lines'
3985
3986    ncols = int(lines.pop(0).split()[1].strip())
3987    nrows = int(lines.pop(0).split()[1].strip())
3988    xllcorner = float(lines.pop(0).split()[1].strip())
3989    yllcorner = float(lines.pop(0).split()[1].strip())
3990    cellsize = float(lines.pop(0).split()[1].strip())
3991    NODATA_value = float(lines.pop(0).split()[1].strip())
3992
3993    assert len(lines) == nrows
3994
3995    #Store data
3996    grid = []
3997
3998    n = len(lines)
3999    for i, line in enumerate(lines):
4000        cells = line.split()
4001        assert len(cells) == ncols
4002        grid.append(array([float(x) for x in cells]))
4003    grid = array(grid)
4004
4005    return {'xllcorner':xllcorner,
4006            'yllcorner':yllcorner,
4007            'cellsize':cellsize,
4008            'NODATA_value':NODATA_value}, grid
4009
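#An illustrative sketch (not part of the original module): _read_asc on a
#minimal 2x3 ESRI ASCII grid written to a temporary file.
def _example_read_asc():
    import tempfile, os
    fd, name = tempfile.mkstemp(suffix='.asc')
    os.close(fd)
    f = open(name, 'w')
    f.write('ncols 3\nnrows 2\nxllcorner 0.0\nyllcorner 0.0\n' +
            'cellsize 25.0\nNODATA_value -9999\n' +
            '1.0 2.0 3.0\n4.0 5.0 6.0\n')
    f.close()
    metadata, grid = _read_asc(name)
    os.remove(name)
    assert metadata['cellsize'] == 25.0
    assert grid.shape == (2, 3) #one grid row per data line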
4010
4011
4012    ####  URS 2 SWW  ###
4013
4014lon_name = 'LON'
4015lat_name = 'LAT'
4016time_name = 'TIME'
4017precision = Float # So if we want to change the precision it's done here
4018class Write_nc:
4019    """
4020    Write an nc file.
4021   
4022    Note, this should be checked to meet cdc netcdf conventions for gridded
4023    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4024   
4025    """
4026    def __init__(self,
4027                 quantity_name,
4028                 file_name,
4029                 time_step_count,
4030                 time_step,
4031                 lon,
4032                 lat):
4033        """
4034        time_step_count is the number of time steps.
4035        time_step is the time step size
4036       
4037        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
4038        """
4039        self.quantity_name = quantity_name
4040        quantity_units = {'HA':'METERS',
4041                          'UA':'METERS/SECOND',
4042                          'VA':'METERS/SECOND'}
4043       
4044        #self.file_name = file_name
4045        self.time_step_count = time_step_count
4046        self.time_step = time_step
4047
4048        # NetCDF file definition
4049        self.outfile = NetCDFFile(file_name, 'w')
4050        outfile = self.outfile       
4051
4052        #Create new file
4053        nc_lon_lat_header(outfile, lon, lat)
4054   
4055        # TIME
4056        outfile.createDimension(time_name, None)
4057        outfile.createVariable(time_name, precision, (time_name,))
4058
4059        #QUANTITY
4060        outfile.createVariable(self.quantity_name, precision,
4061                               (time_name, lat_name, lon_name))
4062        outfile.variables[self.quantity_name].missing_value=-1.e+034
4063        outfile.variables[self.quantity_name].units= \
4064                                 quantity_units[self.quantity_name]
4065        outfile.variables[lon_name][:]= ensure_numeric(lon)
4066        outfile.variables[lat_name][:]= ensure_numeric(lat)
4067
4068        #Assume no one will be wanting to read this, while we are writing
4069        #outfile.close()
4070       
4071    def store_timestep(self, quantity_slice):
4072        """
4073        quantity_slice is the data to be stored at this time step
4074        """
4075       
4076        outfile = self.outfile
4077       
4078        # Get the variables
4079        time = outfile.variables[time_name]
4080        quantity = outfile.variables[self.quantity_name]
4081           
4082        i = len(time)
4083
4084        #Store time
4085        time[i] = i*self.time_step #self.domain.time
4086        quantity[i,:] = quantity_slice*100 # To convert from m to cm
4087                                           # And m/s to cm/sec
4088       
4089    def close(self):
4090        self.outfile.close()
4091
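#An illustrative sketch (not part of the original module): writing two
#timesteps of wave height to a hypothetical file 'ha_test.nc'; values are
#scaled from metres to centimetres by store_timestep.
def _example_write_nc():
    nc_file = Write_nc('HA', 'ha_test.nc',
                       time_step_count=2, time_step=0.5,
                       lon=[148.0, 149.0], lat=[-35.0, -36.0])
    nc_file.store_timestep(array([[1.0, 2.0], [3.0, 4.0]], Float)) #(lat, lon) slice
    nc_file.store_timestep(array([[5.0, 6.0], [7.0, 8.0]], Float))
    nc_file.close()
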
4092def urs2sww(basename_in='o', basename_out=None, verbose=False,
4093            remove_nc_files=True,
4094            minlat=None, maxlat=None,
4095            minlon= None, maxlon=None,
4096            mint=None, maxt=None,
4097            mean_stage=0,
4098            origin = None,
4099            zscale=1,
4100            fail_on_NaN=True,
4101            NaN_filler=0,
4102            elevation=None):
4103    """
4104    Convert URS C binary format for wave propagation to
4105    sww format native to abstract_2d_finite_volumes.
4106
4107    Specify only basename_in and read files of the form
4108    basefilename-z-mux, basefilename-e-mux and basefilename-n-mux containing
4109    relative height, x-velocity and y-velocity, respectively.
4110
4111    Also convert latitude and longitude to UTM. All coordinates are
4112    assumed to be given in the GDA94 datum. The latitude and longitude
4113    information is for a grid.
4114
4115    min's and max's: If omitted - full extent is used.
4116    To include a value min may equal it, while max must exceed it.
4117    Lat and lon are assumed to be in decimal degrees.
4118    NOTE: minlon is the eastern-most boundary.
4119   
4120    origin is a 3-tuple with geo referenced
4121    UTM coordinates (zone, easting, northing)
4122    It will be the origin of the sww file. This shouldn't be used,
4123    since all of anuga should be able to handle an arbitrary origin.
4124
4125
4126    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
4127    which means that latitude is the fastest
4128    varying dimension (row major order, so to speak)
4129
4130    In URS C binary the latitudes and longitudes are in ascending order.
4131    """
4132   
4133    if basename_out is None:
4134        basename_out = basename_in
4135    files_out = urs2nc(basename_in, basename_out)
4136    ferret2sww(basename_out,
4137               minlat=minlat,
4138               maxlat=maxlat,
4139               minlon=minlon, maxlon=maxlon,
4140               mint=mint,
4141               maxt=maxt,
4142               mean_stage=mean_stage,
4143               origin=origin,
4144               zscale=zscale,
4145               fail_on_NaN=fail_on_NaN,
4146               NaN_filler=NaN_filler,
4147               inverted_bathymetry=True,
4148               verbose=verbose)
4149    #print "files_out",files_out
4150    if remove_nc_files:
4151        for file_out in files_out:
4152            os.remove(file_out)
4153   
4154def urs2nc(basename_in = 'o', basename_out = 'urs'):
4155    """
4156    Convert the 3 urs files to 4 nc files.
4157
4158    The urs files must be named:
4159    [basename_in]-z-mux
4160    [basename_in]-e-mux
4161    [basename_in]-n-mux
4162   
4163    """
4164    files_in = [basename_in+'-z-mux',
4165                basename_in+'-e-mux',
4166                basename_in+'-n-mux']
4167    files_out = [basename_out+'_ha.nc',
4168                 basename_out+'_ua.nc',
4169                 basename_out+'_va.nc']
4170    quantities = ['HA','UA','VA']
4171
4172    for file_name in files_in:
4173        if not os.access(file_name, os.F_OK):
4174            msg = 'File %s does not exist or is not accessible' %file_name
4175            raise IOError, msg
4176       
4177    hashed_elevation = None
4178    for file_in, file_out, quantity in zip(files_in,
4179                                           files_out,
4180                                           quantities):
4181        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
4182                                         file_out,
4183                                         quantity)
4184        #print "lonlatdep", lonlatdep
4185        if hashed_elevation is None:
4186            elevation_file = basename_out+'_e.nc'
4187            write_elevation_nc(elevation_file,
4188                                lon,
4189                                lat,
4190                                depth)
4191            hashed_elevation = myhash(lonlatdep)
4192        else:
4193            msg = "The elevation information in the mux files is inconsistent"
4194            assert hashed_elevation == myhash(lonlatdep), msg
4195    files_out.append(elevation_file)
4196    return files_out
4197   
4198def _binary_c2nc(file_in, file_out, quantity):
4199    """
4200    Reads in a quantity urs file and writes a quantity nc file.
4201    Additionally, returns the depth and lat, long info,
4202    so it can be written to a file.
4203    """
4204    columns = 3 # long, lat , depth
4205    mux_file = open(file_in, 'rb')
4206   
4207    # Number of points/stations
4208    (points_num,)= unpack('i',mux_file.read(4))
4209
4210    # nt, int - Number of time steps
4211    (time_step_count,)= unpack('i',mux_file.read(4))
4212
4213    #dt, float - time step, seconds
4214    (time_step,) = unpack('f', mux_file.read(4))
4215   
4216    msg = "Bad data in the mux file."
4217    if points_num < 0:
4218        mux_file.close()
4219        raise ANUGAError, msg
4220    if time_step_count < 0:
4221        mux_file.close()
4222        raise ANUGAError, msg
4223    if time_step < 0:
4224        mux_file.close()
4225        raise ANUGAError, msg
4226   
4227    lonlatdep = p_array.array('f')
4228    lonlatdep.read(mux_file, columns * points_num)
4229    lonlatdep = array(lonlatdep, typecode=Float)   
4230    lonlatdep = reshape(lonlatdep, (points_num, columns))
4231   
4232    lon, lat, depth = lon_lat2grid(lonlatdep)
4233    lon_sorted = list(lon)
4234    lon_sorted.sort()
4235
4236    if not allclose(lon, lon_sorted):
4237        msg = "Longitudes in mux file are not in ascending order"
4238        raise IOError, msg
4239    lat_sorted = list(lat)
4240    lat_sorted.sort()
4241
4242    if not allclose(lat, lat_sorted):
4243        msg = "Latitudes in mux file are not in ascending order"
4244        raise IOError, msg
4245    nc_file = Write_nc(quantity,
4246                       file_out,
4247                       time_step_count,
4248                       time_step,
4249                       lon,
4250                       lat)
4251
4252    for i in range(time_step_count):
4253        #Read in a time slice  from mux file 
4254        hz_p_array = p_array.array('f')
4255        hz_p_array.read(mux_file, points_num)
4256        hz_p = array(hz_p_array, typecode=Float)
4257        hz_p = reshape(hz_p, (len(lon), len(lat)))
4258        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
4259
4260        #write time slice to nc file
4261        nc_file.store_timestep(hz_p)
4262    mux_file.close()
4263    nc_file.close()
4264
4265    return lonlatdep, lon, lat, depth
4266   
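#An illustrative sketch (not part of the original module): the mux header
#read by _binary_c2nc above is two ints and a float, followed by the
#(longitude, latitude, depth) table and then the time slices.
def _example_mux_header():
    from struct import pack
    header = pack('i', 2) + pack('i', 3) + pack('f', 0.5)
    #2 stations, 3 timesteps of 0.5 seconds each
    assert len(header) == 12 #three 4-byte values, as unpacked above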
4267
4268def write_elevation_nc(file_out, lon, lat, depth_vector):
4269    """
4270    Write an nc elevation file.
4271    """
4272   
4273    # NetCDF file definition
4274    outfile = NetCDFFile(file_out, 'w')
4275
4276    #Create new file
4277    nc_lon_lat_header(outfile, lon, lat)
4278   
4279    # ELEVATION
4280    zname = 'ELEVATION'
4281    outfile.createVariable(zname, precision, (lat_name, lon_name))
4282    outfile.variables[zname].units='CENTIMETERS'
4283    outfile.variables[zname].missing_value=-1.e+034
4284
4285    outfile.variables[lon_name][:]= ensure_numeric(lon)
4286    outfile.variables[lat_name][:]= ensure_numeric(lat)
4287
4288    depth = reshape(depth_vector, ( len(lat), len(lon)))
4289    outfile.variables[zname][:]= depth
4290   
4291    outfile.close()
4292   
4293def nc_lon_lat_header(outfile, lon, lat):
4294    """
4295    outfile is the netcdf file handle.
4296    lon - a list/array of the longitudes
4297    lat - a list/array of the latitudes
4298    """
4299   
4300    outfile.institution = 'Geoscience Australia'
4301    outfile.description = 'Converted from URS binary C'
4302   
4303    # Longitude
4304    outfile.createDimension(lon_name, len(lon))
4305    outfile.createVariable(lon_name, precision, (lon_name,))
4306    outfile.variables[lon_name].point_spacing='uneven'
4307    outfile.variables[lon_name].units='degrees_east'
4308    outfile.variables[lon_name].assignValue(lon)
4309
4310
4311    # Latitude
4312    outfile.createDimension(lat_name, len(lat))
4313    outfile.createVariable(lat_name, precision, (lat_name,))
4314    outfile.variables[lat_name].point_spacing='uneven'
4315    outfile.variables[lat_name].units='degrees_north'
4316    outfile.variables[lat_name].assignValue(lat)
4317
4318
4319   
4320def lon_lat2grid(long_lat_dep):
4321    """
4322    given a list of points that are assumed to be on a grid,
4323    return the long's and lat's of the grid.
4324    long_lat_dep is an array where each row is a position.
4325    The first column is longitudes.
4326    The second column is latitudes.
4327
4328    The latitude is the fastest varying dimension - in mux files
4329    """
4330    LONG = 0
4331    LAT = 1
4332    QUANTITY = 2
4333
4334    long_lat_dep = ensure_numeric(long_lat_dep, Float)
4335   
4336    num_points = long_lat_dep.shape[0]
4337    this_rows_long = long_lat_dep[0,LONG]
4338
4339    # Count the length of unique latitudes
4340    i = 0
4341    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
4342        i += 1
4343    # determine the lats and longs from the grid
4344    lat = long_lat_dep[:i, LAT]       
4345    long = long_lat_dep[::i, LONG]
4346   
4347    lenlong = len(long)
4348    lenlat = len(lat)
4349    #print 'len lat', lat, len(lat)
4350    #print 'len long', long, len(long)
4351         
4352    msg = 'Input data is not gridded'     
4353    assert num_points % lenlat == 0, msg
4354    assert num_points % lenlong == 0, msg
4355         
4356    # Test that data is gridded       
4357    for i in range(lenlong):
4358        msg = 'Data is not gridded.  It must be for this operation'
4359        first = i*lenlat
4360        last = first + lenlat
4361               
4362        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4363        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
4364   
4365   
4366#    print 'range long', min(long), max(long)
4367#    print 'range lat', min(lat), max(lat)
4368#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4369#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4370   
4371   
4372   
4373    msg = 'Out of range latitudes/longitudes'
4374    for l in lat: assert -90 < l < 90, msg
4375    for l in long: assert -180 < l < 180, msg
4376
4377    # Changing quantity from lat being the fastest varying dimension to
4378    # long being the fastest varying dimension
4379    # FIXME - make this faster/do this a better way
4380    # use numeric transpose, after reshaping the quantity vector
4381#    quantity = zeros(len(long_lat_dep), Float)
4382    quantity = zeros(num_points, Float)
4383   
4384#    print 'num',num_points
4385    for lat_i, _ in enumerate(lat):
4386        for long_i, _ in enumerate(long):
4387            q_index = lat_i*lenlong+long_i
4388            lld_index = long_i*lenlat+lat_i
4389#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4390            temp = long_lat_dep[lld_index, QUANTITY]
4391            quantity[q_index] = temp
4392           
4393    return long, lat, quantity
4394
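#An illustrative sketch (not part of the original module): recovering the
#grid axes from mux-ordered points (latitude varying fastest) and the
#quantity column reordered so longitude varies fastest.
def _example_lon_lat2grid():
    points = [[148.0, -35.0, 10.0], [148.0, -36.0, 11.0], [148.0, -37.0, 12.0],
              [149.0, -35.0, 20.0], [149.0, -36.0, 21.0], [149.0, -37.0, 22.0]]
    longs, lats, quantity = lon_lat2grid(points)
    assert allclose(longs, [148.0, 149.0])
    assert allclose(lats, [-35.0, -36.0, -37.0])
    assert allclose(quantity, [10.0, 20.0, 11.0, 21.0, 12.0, 22.0])
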
4395    ####  END URS 2 SWW  ###     
4396#-------------------------------------------------------------
4397if __name__ == "__main__":
4398    pass
4399