source: anuga_core/source/anuga/shallow_water/data_manager.py @ 3967

1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using abstract_2d_finite_volumes
48
49"""

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import csv
import os
from struct import unpack
import array as p_array

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around, reshape, transpose
from Scientific.IO.NetCDF import NetCDFFile


from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.utilities.numerical_tools import ensure_numeric
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError

def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s


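# Example (sketch) of the transform above:
#
#     make_filename(' Run (final) output ')  ->  'Run_final_output'
#
# i.e. surrounding whitespace is stripped, internal spaces become underscores
# and parentheses are removed.
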
def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return path



def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from anuga.config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN


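# Example (sketch): on a unix-style platform
#
#     create_filename('data', 'run', 'sww', size=4, time=1.25)
#
# returns 'data/run_size4_time1.25.sww'; check_dir supplies the trailing
# separator and creates the directory if necessary.
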
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from anuga.config import data_dir

    dir = check_dir(datadir)  #Includes trailing separator

    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
                                        '''   'w' (write)'''+\
                                        '''   'r' (read)''' +\
                                        '''   'a' (append)'''

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.domain = domain



        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)




        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """
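
    # Illustrative sketch (not in the original source): reading back the
    # structure this class writes, assuming an existing file 'run.sww':
    #
    #     from Scientific.IO.NetCDF import NetCDFFile
    #     fid = NetCDFFile('run.sww', 'r')
    #     print fid.variables['time'][:]       # one entry per stored timestep
    #     print fid.variables['stage'][0,:]    # stage at all points, step 0
    #     fid.close()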


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #Maximum file size (default 2GB)
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                #fid.createDimension('number_of_points', len(domain.vertexlist))
                fid.createDimension('number_of_points', self.domain.number_of_full_nodes)

                # FIXME(Ole): This will cause sww files for parallel domains to
                # have ghost nodes stored (but not used by triangles).
                # To clean this up, we have to change get_vertex_values and friends in
                # quantity.py (but I can't be bothered right now)
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            # Always use full precision for time lest two timesteps
            # close to each other appear as the same step
            fid.createVariable('time', Float,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)

        volumes[:] = V.astype(volumes.typecode())
        x[:] = X
        y[:] = Y
        z[:] = Z

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z
        ################################



        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #I change the domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename+filename_ext)


            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)


                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]

                    # Clip stored stage to the bed wherever the water depth
                    # is below the minimum storable height
                    A = choose(A-z[:] >= self.minimum_storable_height,
                               (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

            #As in: eval(name + '[i,:] = A.astype(self.precision)')
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()



#Class for handling checkpoint data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        #Checkpoint files use the .cpt extension
        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity (volumes) of the
        triangulation constituting the mesh.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #Note: Z is not stored; the file created by __init__ has no
        #dedicated elevation variable

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        stage = fid.variables['stage']
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        stage[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()





#Function for storing xya output
#FIXME Not done yet for this version
#This is obsolete.  Use geo_spatial_data instead
class Data_format_xya(Data_format):
    """Generic interface to data formats
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME - This is the old xya format
    def store_all(self):
        """Specialisation of store_all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momenta) to file
        """
        pass


#### NEED national exposure database

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
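
# Illustrative sketch (not part of the original source): typical use of the
# Exposure_csv class below, assuming a file 'exposure.csv' with LATITUDE and
# LONGITUDE columns and a hypothetical column titled 'WALLS':
#
#     csv_obj = Exposure_csv('exposure.csv')
#     walls = csv_obj.get_column('WALLS')      # values come back as strings
#     csv_obj.set_column('WALLS_COPY', walls)
#     csv_obj.save('exposure_modified.csv')
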
class Exposure_csv:
    def __init__(self,file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.


        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad, the
        info to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.


        """
        self._file_name = file_name
        self._geospatial = None #

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
        self._load_exposure_csv(self._file_name)
        try:
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if is_x_y_locations is not False:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                 longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #fixme throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #Check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result != 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result != 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic,
                    other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result != 0:
                    return result
            return 0
        else:
            return 1

850   
851    def _load_exposure_csv(self, file_name):
852        """
853        Load in the csv as a dic, title as key and column info as value, .
854        Also, create a dic, title as key and column index as value.
855        """
856        #
857        attribute_dic = {}
858        title_index_dic = {}
859        titles_stripped = [] # list of titles
860       
861        reader = csv.reader(file(file_name))
862
863        # Read in and manipulate the title info
864        titles = reader.next()
865        for i,title in enumerate(titles):
866            titles_stripped.append(title.strip())
867            title_index_dic[title.strip()] = i
868        title_count = len(titles_stripped)       
869        #print "title_index_dic",title_index_dic
870
871       
872        #create a dic of colum values, indexed by column title
873        for line in reader:
874            if len(line) <> title_count:
875                raise IOError #FIXME make this nicer
876            for i, value in enumerate(line):
877                attribute_dic.setdefault(titles_stripped[i],[]).append(value)
878           
879        return attribute_dic, title_index_dic
880
881    def get_column(self, column_name, use_refind_polygon=False):
882        """
883        Given a column name return a list of the column values
884
885        Note, the type of the values will be String!
886        do this to change a list of strings to a list of floats
887        time = [float(x) for x in time]
888       
889        Not implemented:
890        if use_refind_polygon is True, only return values in the
891        refined polygon
892        """
893        if not self._attribute_dic.has_key(column_name):
894            msg = 'Therer is  no column called %s!' %column_name
895            raise TitleValueError, msg
896        return self._attribute_dic[column_name]
897

    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column_name column, using the
        known_values, to return a value of the value_column_name column.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) != value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' %column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name,'wb')
        writer = csv.writer(fd)

        #Write the title to a csv file
        line = [None]* len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]]= title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]* len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]]= \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)


#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()


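# Example (sketch): writing a single flat triangle to 'tri.obj'
#
#     from Numeric import array
#     x = array([[0.0, 1.0, 0.0]])
#     y = array([[0.0, 0.0, 1.0]])
#     z = array([[0.0, 0.0, 0.0]])
#     write_obj('tri', x, y, z)
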
#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(data_dir+os.sep+filename,'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Could not extract time from filename %s' %filename



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

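# Example (sketch, hypothetical filenames): keep every tenth timestep of a
# large result file
#
#     filter_netcdf('run_full.sww', 'run_decimated.sww', first=0, step=10)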

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

#FIXME move into geospatial.  There should only be one method that
# reads xya, and writes pts.
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get ASCII file
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)



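# Example (sketch): convert every second point of points.xya to points.pts,
# scaling the third field from feet to metres via z_func
#
#     xya2pts('points', stride=2, z_func=lambda z: 0.3048*z)
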

######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from anuga.coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False,):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

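# Example (sketch, hypothetical filenames and coordinates): clip topo.dem to
# a bounding box while converting to topo.pts, caching the result so
# repeated calls are fast
#
#     dem2pts('topo',
#             easting_min=722000, easting_max=725000,
#             northing_min=5893000, northing_max=5895000,
#             use_cache=True, verbose=True)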

def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                          'of spatial point data'
    #Assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #Compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # Calculate number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])



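
# Example (sketch, hypothetical filename): iterating over the generator above
#
#     lines = open('model.sdf', 'r').readlines()
#     for section in _read_hecras_cross_sections(lines):
#         print '%d surface points in this cross-section' %len(section)
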
1606def hecras_cross_sections2pts(basename_in,
1607                              basename_out=None,
1608                              verbose=False):
1609    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1610
1611    Example:
1612
1613
1614# RAS export file created on Mon 15Aug2005 11:42
1615# by HEC-RAS Version 3.1.1
1616
1617BEGIN HEADER:
1618  UNITS: METRIC
1619  DTM TYPE: TIN
1620  DTM: v:\1\cit\perth_topo\river_tin
1621  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1622  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1623  MAP PROJECTION: UTM
1624  PROJECTION ZONE: 50
1625  DATUM: AGD66
1626  VERTICAL DATUM:
1627  NUMBER OF REACHES:  19
1628  NUMBER OF CROSS-SECTIONS:  14206
1629END HEADER:
1630
1631
1632Only the SURFACE LINE data of the following form will be utilised
1633
1634  CROSS-SECTION:
1635    STREAM ID:Southern-Wungong
1636    REACH ID:Southern-Wungong
1637    STATION:19040.*
1638    CUT LINE:
1639      405548.671603161 , 6438142.7594925
1640      405734.536092045 , 6438326.10404912
1641      405745.130459356 , 6438331.48627354
1642      405813.89633823 , 6438368.6272789
1643    SURFACE LINE:
1644     405548.67,   6438142.76,   35.37
1645     405552.24,   6438146.28,   35.41
1646     405554.78,   6438148.78,   35.44
1647     405555.80,   6438149.79,   35.44
1648     405559.37,   6438153.31,   35.45
1649     405560.88,   6438154.81,   35.44
1650     405562.93,   6438156.83,   35.42
1651     405566.50,   6438160.35,   35.38
1652     405566.99,   6438160.83,   35.37
1653     ...
1654   END CROSS-SECTION
1655
1656    Convert to NetCDF pts format which is
1657
1658    points:  (Nx2) Float array
1659    elevation: N Float array
1660    """
1661
1662    #FIXME: Can this be written feasibly using write_pts?
1663
1664    import os
1665    from Scientific.IO.NetCDF import NetCDFFile
1666    from Numeric import Float, zeros, reshape
1667
1668    root = basename_in
1669
1670    #Get ASCII file
1671    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1672
1673    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1674
1675    lines = infile.readlines()
1676    infile.close()
1677
1678    if verbose: print 'Converting to pts format'
1679
1680    i = 0
1681    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1682        i += 1
1683
1684    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1685    i += 1
1686
1687    assert lines[i].strip().upper().startswith('UNITS:')
1688    units = lines[i].strip().split()[1]
1689    i += 1
1690
1691    assert lines[i].strip().upper().startswith('DTM TYPE:')
1692    i += 1
1693
1694    assert lines[i].strip().upper().startswith('DTM:')
1695    i += 1
1696
1697    assert lines[i].strip().upper().startswith('STREAM')
1698    i += 1
1699
1700    assert lines[i].strip().upper().startswith('CROSS')
1701    i += 1
1702
1703    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1704    projection = lines[i].strip().split(':')[1]
1705    i += 1
1706
1707    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1708    zone = int(lines[i].strip().split(':')[1])
1709    i += 1
1710
1711    assert lines[i].strip().upper().startswith('DATUM:')
1712    datum = lines[i].strip().split(':')[1]
1713    i += 1
1714
1715    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1716    i += 1
1717
1718    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1719    i += 1
1720
1721    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1722    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1723    i += 1
1724
1725
1726    #Now read all points
1727    points = []
1728    elevation = []
1729    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1730        for k, entry in enumerate(entries):
1731            points.append(entry[:2])
1732            elevation.append(entry[2])
1733
1734
    msg = 'Actual number of cross-sections == %d, reported as %d'\
          %(j+1, number_of_cross_sections)
1737    assert j+1 == number_of_cross_sections, msg
1738
1739    #Get output file
    if basename_out is None:
1741        ptsname = root + '.pts'
1742    else:
1743        ptsname = basename_out + '.pts'
1744
1745    #FIXME (DSG-ON): use loadASCII export_points_file
1746    if verbose: print 'Store to NetCDF file %s' %ptsname
1747    # NetCDF file definition
1748    outfile = NetCDFFile(ptsname, 'w')
1749
1750    #Create new file
1751    outfile.institution = 'Geoscience Australia'
1752    outfile.description = 'NetCDF pts format for compact and portable ' +\
1753                          'storage of spatial point data derived from HEC-RAS'
1754
1755    #Georeferencing
1756    outfile.zone = zone
1757    outfile.xllcorner = 0.0
1758    outfile.yllcorner = 0.0
1759    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1760    outfile.false_northing = 1000000
1761
1762    outfile.projection = projection
1763    outfile.datum = datum
1764    outfile.units = units
1765
1766
1767    outfile.createDimension('number_of_points', len(points))
1768    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1769
1770    # variable definitions
1771    outfile.createVariable('points', Float, ('number_of_points',
1772                                             'number_of_dimensions'))
1773    outfile.createVariable('elevation', Float, ('number_of_points',))
1774
1775    # Get handles to the variables
1776    outfile.variables['points'][:] = points
1777    outfile.variables['elevation'][:] = elevation
1778
1779    outfile.close()
1780
1781
1782
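# Usage sketch for hecras_cross_sections2pts (the basenames are
# hypothetical). This reads river_xs.sdf and writes river_xs_points.pts:
#
#   hecras_cross_sections2pts('river_xs',
#                             basename_out='river_xs_points',
#                             verbose=True)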
1783def sww2dem(basename_in, basename_out = None,
1784            quantity = None,
1785            timestep = None,
1786            reduction = None,
1787            cellsize = 10,
1788            NODATA_value = -9999,
1789            easting_min = None,
1790            easting_max = None,
1791            northing_min = None,
1792            northing_max = None,
1793            verbose = False,
1794            origin = None,
1795            datum = 'WGS84',
            format = 'ers'):
1797
    """Read SWW file and convert to Digital Elevation Model format (.asc or .ers)
1799
1800    Example (ASC):
1801
1802    ncols         3121
1803    nrows         1800
1804    xllcorner     722000
1805    yllcorner     5893000
1806    cellsize      25
1807    NODATA_value  -9999
1808    138.3698 137.4194 136.5062 135.5558 ..........
1809
1810    Also write accompanying file with same basename_in but extension .prj
1811    used to fix the UTM zone, datum, false northings and eastings.
1812
1813    The prj format is assumed to be as
1814
1815    Projection    UTM
1816    Zone          56
1817    Datum         WGS84
1818    Zunits        NO
1819    Units         METERS
1820    Spheroid      WGS84
1821    Xshift        0.0000000000
1822    Yshift        10000000.0000000000
1823    Parameters
1824
1825
1826    The parameter quantity must be the name of an existing quantity or
1827    an expression involving existing quantities. The default is
1828    'elevation'.
1829
    If timestep (an index) is given, output quantity at that timestep.

    If reduction is given, use that to reduce quantity over all timesteps.

    datum sets the geodetic datum written to the output (default 'WGS84').

    format can be either 'asc' or 'ers'.
1837    """
1838
1839    import sys
1840    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1841    from Numeric import array2string
1842
1843    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1844    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
1845
1846    msg = 'Format must be either asc or ers'
1847    assert format.lower() in ['asc', 'ers'], msg
1848
1849
1850    false_easting = 500000
1851    false_northing = 10000000
1852
1853    if quantity is None:
1854        quantity = 'elevation'
1855
1856    if reduction is None:
1857        reduction = max
1858
1859    if basename_out is None:
1860        basename_out = basename_in + '_%s' %quantity
1861
1862    swwfile = basename_in + '.sww'
1863    demfile = basename_out + '.' + format
1864    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option)
1866
1867    #Read sww file
1868    if verbose: print 'Reading from %s' %swwfile
1869    from Scientific.IO.NetCDF import NetCDFFile
1870    fid = NetCDFFile(swwfile)
1871
1872    #Get extent and reference
1873    x = fid.variables['x'][:]
1874    y = fid.variables['y'][:]
1875    volumes = fid.variables['volumes'][:]
1876    if timestep is not None:
1877        times = fid.variables['time'][timestep]
1878    else:
1879        times = fid.variables['time'][:]
1880
1881    number_of_timesteps = fid.dimensions['number_of_timesteps']
1882    number_of_points = fid.dimensions['number_of_points']
1883   
1884    if origin is None:
1885
1886        #Get geo_reference
1887        #sww files don't have to have a geo_ref
1888        try:
1889            geo_reference = Geo_reference(NetCDFObject=fid)
1890        except AttributeError, e:
1891            geo_reference = Geo_reference() #Default georef object
1892
1893        xllcorner = geo_reference.get_xllcorner()
1894        yllcorner = geo_reference.get_yllcorner()
1895        zone = geo_reference.get_zone()
1896    else:
1897        zone = origin[0]
1898        xllcorner = origin[1]
1899        yllcorner = origin[2]
1900
1901
1902
1903    #FIXME: Refactor using code from file_function.statistics
1904    #Something like print swwstats(swwname)
1905    if verbose:
1906        print '------------------------------------------------'
1907        print 'Statistics of SWW file:'
1908        print '  Name: %s' %swwfile
1909        print '  Reference:'
1910        print '    Lower left corner: [%f, %f]'\
1911              %(xllcorner, yllcorner)
1912        if timestep is not None:
1913            print '    Time: %f' %(times)
1914        else:
1915            print '    Start time: %f' %fid.starttime[0]
1916        print '  Extent:'
1917        print '    x [m] in [%f, %f], len(x) == %d'\
1918              %(min(x.flat), max(x.flat), len(x.flat))
1919        print '    y [m] in [%f, %f], len(y) == %d'\
1920              %(min(y.flat), max(y.flat), len(y.flat))
1921        if timestep is not None:
1922            print '    t [s] = %f, len(t) == %d' %(times, 1)
1923        else:
1924            print '    t [s] in [%f, %f], len(t) == %d'\
1925              %(min(times), max(times), len(times))
1926        print '  Quantities [SI units]:'
1927        for name in ['stage', 'xmomentum', 'ymomentum']:
1928            q = fid.variables[name][:].flat
1929            if timestep is not None:
1930                q = q[timestep*len(x):(timestep+1)*len(x)]
1931            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1932        for name in ['elevation']:
1933            q = fid.variables[name][:].flat
1934            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1935           
1936    # Get quantity and reduce if applicable
1937    if verbose: print 'Processing quantity %s' %quantity
1938
1939    # Turn NetCDF objects into Numeric arrays
1940    quantity_dict = {}
1941    for name in fid.variables.keys():
1942        quantity_dict[name] = fid.variables[name][:] 
1943
1944
1945    # Convert quantity expression to quantities found in sww file   
1946    q = apply_expression_to_dictionary(quantity, quantity_dict)
1947
1948    if len(q.shape) == 2:
1949        #q has a time component and needs to be reduced along
1950        #the temporal dimension
1951        if verbose: print 'Reducing quantity %s' %quantity
1952        q_reduced = zeros( number_of_points, Float )
1953       
1954        if timestep is not None:
1955            for k in range(number_of_points):
1956                q_reduced[k] = q[timestep,k] 
1957        else:
1958            for k in range(number_of_points):
1959                q_reduced[k] = reduction( q[:,k] )
1960
1961        q = q_reduced
1962
1963    #Post condition: Now q has dimension: number_of_points
1964    assert len(q.shape) == 1
1965    assert q.shape[0] == number_of_points
1966
1967
1968    if verbose:
1969        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1970
1971
1972    #Create grid and update xll/yll corner and x,y
1973
1974    #Relative extent
1975    if easting_min is None:
1976        xmin = min(x)
1977    else:
1978        xmin = easting_min - xllcorner
1979
1980    if easting_max is None:
1981        xmax = max(x)
1982    else:
1983        xmax = easting_max - xllcorner
1984
1985    if northing_min is None:
1986        ymin = min(y)
1987    else:
1988        ymin = northing_min - yllcorner
1989
1990    if northing_max is None:
1991        ymax = max(y)
1992    else:
1993        ymax = northing_max - yllcorner
1994
1995
1996
1997    if verbose: print 'Creating grid'
1998    ncols = int((xmax-xmin)/cellsize)+1
1999    nrows = int((ymax-ymin)/cellsize)+1
2000
2001
2002    #New absolute reference and coordinates
2003    newxllcorner = xmin+xllcorner
2004    newyllcorner = ymin+yllcorner
2005
2006    x = x+xllcorner-newxllcorner
2007    y = y+yllcorner-newyllcorner
2008   
2009    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2010    assert len(vertex_points.shape) == 2
2011
2012    grid_points = zeros ( (ncols*nrows, 2), Float )
2013
2014
2015    for i in xrange(nrows):
2016        if format.lower() == 'asc':
2017            yg = i*cellsize
2018        else:
            #This will flip the order of the y values for ers
2020            yg = (nrows-i)*cellsize
2021
2022        for j in xrange(ncols):
2023            xg = j*cellsize
2024            k = i*ncols + j
2025
2026            grid_points[k,0] = xg
2027            grid_points[k,1] = yg
2028
2029    #Interpolate
2030    #from least_squares import Interpolation
2031    from anuga.fit_interpolate.interpolate import Interpolate
2032
    if verbose: print 'vertex_points.shape:', vertex_points.shape, 'volumes.shape:', volumes.shape
2034    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2035
2036    #Interpolate using quantity values
2037    if verbose: print 'Interpolating'
2038    grid_values = interp.interpolate(q, grid_points).flat
2039
2040
2041    if verbose:
2042        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2043                                                      max(grid_values))
2044
2045    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2046    P = interp.mesh.get_boundary_polygon()
2047    outside_indices = outside_polygon(grid_points, P, closed=True)
2048
2049    for i in outside_indices:
2050        grid_values[i] = NODATA_value
2051
2052
2053
2054
2055    if format.lower() == 'ers':
2056        # setup ERS header information
2057        grid_values = reshape(grid_values,(nrows, ncols))
2058        header = {}
2059        header['datum'] = '"' + datum + '"'
2060        # FIXME The use of hardwired UTM and zone number needs to be made optional
2061        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2062        header['projection'] = '"UTM-' + str(zone) + '"'
2063        header['coordinatetype'] = 'EN'
2064        if header['coordinatetype'] == 'LL':
2065            header['longitude'] = str(newxllcorner)
2066            header['latitude'] = str(newyllcorner)
2067        elif header['coordinatetype'] == 'EN':
2068            header['eastings'] = str(newxllcorner)
2069            header['northings'] = str(newyllcorner)
2070        header['nullcellvalue'] = str(NODATA_value)
2071        header['xdimension'] = str(cellsize)
2072        header['ydimension'] = str(cellsize)
2073        header['value'] = '"' + quantity + '"'
2074        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2075
2076
2077        #Write
2078        if verbose: print 'Writing %s' %demfile
2079        import ermapper_grids
2080        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2081
2082        fid.close()
2083    else:
2084        #Write to Ascii format
2085
2086        #Write prj file
2087        prjfile = basename_out + '.prj'
2088
2089        if verbose: print 'Writing %s' %prjfile
2090        prjid = open(prjfile, 'w')
2091        prjid.write('Projection    %s\n' %'UTM')
2092        prjid.write('Zone          %d\n' %zone)
2093        prjid.write('Datum         %s\n' %datum)
2094        prjid.write('Zunits        NO\n')
2095        prjid.write('Units         METERS\n')
2096        prjid.write('Spheroid      %s\n' %datum)
2097        prjid.write('Xshift        %d\n' %false_easting)
2098        prjid.write('Yshift        %d\n' %false_northing)
2099        prjid.write('Parameters\n')
2100        prjid.close()
2101
2102
2103
2104        if verbose: print 'Writing %s' %demfile
2105
2106        ascid = open(demfile, 'w')
2107
2108        ascid.write('ncols         %d\n' %ncols)
2109        ascid.write('nrows         %d\n' %nrows)
2110        ascid.write('xllcorner     %d\n' %newxllcorner)
2111        ascid.write('yllcorner     %d\n' %newyllcorner)
2112        ascid.write('cellsize      %f\n' %cellsize)
2113        ascid.write('NODATA_value  %d\n' %NODATA_value)
2114
2115
2116        #Get bounding polygon from mesh
2117        #P = interp.mesh.get_boundary_polygon()
2118        #inside_indices = inside_polygon(grid_points, P)
2119
2120        for i in range(nrows):
2121            if verbose and i%((nrows+10)/10)==0:
2122                print 'Doing row %d of %d' %(i, nrows)
2123
2124            base_index = (nrows-i-1)*ncols
2125
2126            slice = grid_values[base_index:base_index+ncols]
2127            s = array2string(slice, max_line_width=sys.maxint)
2128            ascid.write(s[1:-1] + '\n')
2129
2130
2131            #print
2132            #for j in range(ncols):
2133            #    index = base_index+j#
2134            #    print grid_values[index],
2135            #    ascid.write('%f ' %grid_values[index])
2136            #ascid.write('\n')
2137
2138        #Close
2139        ascid.close()
2140        fid.close()
2141
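# Usage sketch for sww2dem (basenames hypothetical). Export the maximum
# stage over all timesteps as an ArcView ASCII grid, and the water depth
# at timestep 0 as an ERMapper grid (quantity may be an expression
# accepted by apply_expression_to_dictionary):
#
#   sww2dem('runup', quantity='stage', reduction=max,
#           cellsize=25, format='asc')
#   sww2dem('runup', quantity='stage-elevation', timestep=0,
#           cellsize=25, format='ers')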
2142#Backwards compatibility
2143def sww2asc(basename_in, basename_out = None,
2144            quantity = None,
2145            timestep = None,
2146            reduction = None,
2147            cellsize = 10,
2148            verbose = False,
2149            origin = None):
2150    print 'sww2asc will soon be obsoleted - please use sww2dem'
2151    sww2dem(basename_in,
2152            basename_out = basename_out,
2153            quantity = quantity,
2154            timestep = timestep,
2155            reduction = reduction,
2156            cellsize = cellsize,
2157            verbose = verbose,
2158            origin = origin,
            datum = 'WGS84',
            format = 'asc')
2161
2162def sww2ers(basename_in, basename_out = None,
2163            quantity = None,
2164            timestep = None,
2165            reduction = None,
2166            cellsize = 10,
2167            verbose = False,
2168            origin = None,
2169            datum = 'WGS84'):
2170    print 'sww2ers will soon be obsoleted - please use sww2dem'
2171    sww2dem(basename_in,
2172            basename_out = basename_out,
2173            quantity = quantity,
2174            timestep = timestep,
2175            reduction = reduction,
2176            cellsize = cellsize,
2177            verbose = verbose,
2178            origin = origin,
2179            datum = datum,
2180            format = 'ers')
2181################################# END COMPATIBILITY ##############
2182
2183
2184
2185def sww2pts(basename_in, basename_out=None,
2186            data_points=None,
2187            quantity=None,
2188            timestep=None,
2189            reduction=None,
2190            NODATA_value=-9999,
2191            verbose=False,
2192            origin=None):
2193            #datum = 'WGS84')
2194
2195
2196    """Read SWW file and convert to interpolated values at selected points
2197
    The parameter 'quantity' must be the name of an existing quantity or
2199    an expression involving existing quantities. The default is
2200    'elevation'.
2201
2202    if timestep (an index) is given, output quantity at that timestep
2203
2204    if reduction is given use that to reduce quantity over all timesteps.
2205
2206    data_points (Nx2 array) give locations of points where quantity is to be computed.
2207   
2208    """
2209
2210    import sys
2211    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2212    from Numeric import array2string
2213
2214    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2215    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
2216
2217    from anuga.geospatial_data.geospatial_data import Geospatial_data
2218
2219    if quantity is None:
2220        quantity = 'elevation'
2221
2222    if reduction is None:
2223        reduction = max
2224
2225    if basename_out is None:
2226        basename_out = basename_in + '_%s' %quantity
2227
2228    swwfile = basename_in + '.sww'
2229    ptsfile = basename_out + '.pts'
2230
2231    # Read sww file
2232    if verbose: print 'Reading from %s' %swwfile
2233    from Scientific.IO.NetCDF import NetCDFFile
2234    fid = NetCDFFile(swwfile)
2235
2236    # Get extent and reference
2237    x = fid.variables['x'][:]
2238    y = fid.variables['y'][:]
2239    volumes = fid.variables['volumes'][:]
2240
2241    number_of_timesteps = fid.dimensions['number_of_timesteps']
2242    number_of_points = fid.dimensions['number_of_points']
2243    if origin is None:
2244
2245        # Get geo_reference
2246        # sww files don't have to have a geo_ref
2247        try:
2248            geo_reference = Geo_reference(NetCDFObject=fid)
2249        except AttributeError, e:
2250            geo_reference = Geo_reference() #Default georef object
2251
2252        xllcorner = geo_reference.get_xllcorner()
2253        yllcorner = geo_reference.get_yllcorner()
2254        zone = geo_reference.get_zone()
2255    else:
2256        zone = origin[0]
2257        xllcorner = origin[1]
2258        yllcorner = origin[2]
2259
2260
2261
2262    # FIXME: Refactor using code from file_function.statistics
2263    # Something like print swwstats(swwname)
2264    if verbose:
2265        x = fid.variables['x'][:]
2266        y = fid.variables['y'][:]
2267        times = fid.variables['time'][:]
2268        print '------------------------------------------------'
2269        print 'Statistics of SWW file:'
2270        print '  Name: %s' %swwfile
2271        print '  Reference:'
2272        print '    Lower left corner: [%f, %f]'\
2273              %(xllcorner, yllcorner)
2274        print '    Start time: %f' %fid.starttime[0]
2275        print '  Extent:'
2276        print '    x [m] in [%f, %f], len(x) == %d'\
2277              %(min(x.flat), max(x.flat), len(x.flat))
2278        print '    y [m] in [%f, %f], len(y) == %d'\
2279              %(min(y.flat), max(y.flat), len(y.flat))
2280        print '    t [s] in [%f, %f], len(t) == %d'\
2281              %(min(times), max(times), len(times))
2282        print '  Quantities [SI units]:'
2283        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2284            q = fid.variables[name][:].flat
2285            print '    %s in [%f, %f]' %(name, min(q), max(q))
2286
2287
2288
2289    # Get quantity and reduce if applicable
2290    if verbose: print 'Processing quantity %s' %quantity
2291
2292    # Turn NetCDF objects into Numeric arrays
2293    quantity_dict = {}
2294    for name in fid.variables.keys():
2295        quantity_dict[name] = fid.variables[name][:]
2296
2297
2298
2299    # Convert quantity expression to quantities found in sww file   
2300    q = apply_expression_to_dictionary(quantity, quantity_dict)
2301
2302
2303
    if len(q.shape) == 2:
        # q has a time component and needs to be reduced along
        # the temporal dimension
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        if timestep is not None:
            for k in range(number_of_points):
                q_reduced[k] = q[timestep,k]
        else:
            for k in range(number_of_points):
                q_reduced[k] = reduction( q[:,k] )

        q = q_reduced
2314
2315    # Post condition: Now q has dimension: number_of_points
2316    assert len(q.shape) == 1
2317    assert q.shape[0] == number_of_points
2318
2319
2320    if verbose:
2321        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2322
2323
2324    # Create grid and update xll/yll corner and x,y
2325    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2326    assert len(vertex_points.shape) == 2
2327
2328    # Interpolate
2329    from anuga.fit_interpolate.interpolate import Interpolate
2330    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2331
2332    # Interpolate using quantity values
2333    if verbose: print 'Interpolating'
2334    interpolated_values = interp.interpolate(q, data_points).flat
2335
2336
2337    if verbose:
2338        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2339                                                      max(interpolated_values))
2340
2341    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2342    P = interp.mesh.get_boundary_polygon()
2343    outside_indices = outside_polygon(data_points, P, closed=True)
2344
2345    for i in outside_indices:
2346        interpolated_values[i] = NODATA_value
2347
2348
2349    # Store results   
2350    G = Geospatial_data(data_points=data_points,
2351                        attributes=interpolated_values)
2352
2353    G.export_points_file(ptsfile, absolute = True)
2354
2355    fid.close()
2356
2357
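# Usage sketch for sww2pts (basename and coordinates hypothetical).
# data_points is an Nx2 array in the same coordinate frame as the sww
# file's x and y arrays (i.e. relative to its geo-reference):
#
#   from Numeric import array
#   points = array([[510.0, 110.0],
#                   [550.0, 150.0]])
#   sww2pts('runup', quantity='elevation', data_points=points)
#
# This writes the interpolated values to runup_elevation.pts.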
2358def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2359                                  use_cache = False,
2360                                  verbose = False):
    """Read Digital Elevation Model from the following ASCII format (.asc)
2362
2363    Example:
2364
2365    ncols         3121
2366    nrows         1800
2367    xllcorner     722000
2368    yllcorner     5893000
2369    cellsize      25
2370    NODATA_value  -9999
2371    138.3698 137.4194 136.5062 135.5558 ..........
2372
2373    Convert basename_in + '.asc' to NetCDF format (.dem)
2374    mimicking the ASCII format closely.
2375
2376
2377    An accompanying file with same basename_in but extension .prj must exist
2378    and is used to fix the UTM zone, datum, false northings and eastings.
2379
2380    The prj format is assumed to be as
2381
2382    Projection    UTM
2383    Zone          56
2384    Datum         WGS84
2385    Zunits        NO
2386    Units         METERS
2387    Spheroid      WGS84
2388    Xshift        0.0000000000
2389    Yshift        10000000.0000000000
2390    Parameters
2391    """
2392
2393
2394
2395    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2396
2397    if use_cache is True:
2398        from caching import cache
2399        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2400                       dependencies = [basename_in + '.asc'],
2401                       verbose = verbose)
2402
2403    else:
2404        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2405
2406    return result
2407
2408
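# Usage sketch (assumes topo.asc and topo.prj exist; the basename is
# hypothetical). With use_cache=True the conversion is only redone when
# topo.asc changes:
#
#   convert_dem_from_ascii2netcdf('topo', use_cache=True, verbose=True)
#
# This writes topo.dem.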
2409
2410
2411
2412
2413def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2414                                  verbose = False):
    """Read Digital Elevation Model from the following ASCII format (.asc)
2416
2417    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2418    """
2419
2420    import os
2421    from Scientific.IO.NetCDF import NetCDFFile
2422    from Numeric import Float, array
2423
2424    #root, ext = os.path.splitext(basename_in)
2425    root = basename_in
2426
2427    ###########################################
2428    # Read Meta data
    if verbose: print 'Reading METADATA from %s' %(root + '.prj')
2430    metadatafile = open(root + '.prj')
2431    metalines = metadatafile.readlines()
2432    metadatafile.close()
2433
2434    L = metalines[0].strip().split()
2435    assert L[0].strip().lower() == 'projection'
2436    projection = L[1].strip()                   #TEXT
2437
2438    L = metalines[1].strip().split()
2439    assert L[0].strip().lower() == 'zone'
2440    zone = int(L[1].strip())
2441
2442    L = metalines[2].strip().split()
2443    assert L[0].strip().lower() == 'datum'
2444    datum = L[1].strip()                        #TEXT
2445
2446    L = metalines[3].strip().split()
2447    assert L[0].strip().lower() == 'zunits'     #IGNORE
2448    zunits = L[1].strip()                       #TEXT
2449
2450    L = metalines[4].strip().split()
2451    assert L[0].strip().lower() == 'units'
2452    units = L[1].strip()                        #TEXT
2453
2454    L = metalines[5].strip().split()
2455    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2456    spheroid = L[1].strip()                     #TEXT
2457
2458    L = metalines[6].strip().split()
2459    assert L[0].strip().lower() == 'xshift'
2460    false_easting = float(L[1].strip())
2461
2462    L = metalines[7].strip().split()
2463    assert L[0].strip().lower() == 'yshift'
2464    false_northing = float(L[1].strip())
2465
2466    #print false_easting, false_northing, zone, datum
2467
2468
2469    ###########################################
2470    #Read DEM data
2471
2472    datafile = open(basename_in + '.asc')
2473
2474    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2475    lines = datafile.readlines()
2476    datafile.close()
2477
2478    if verbose: print 'Got', len(lines), ' lines'
2479
2480    ncols = int(lines[0].split()[1].strip())
2481    nrows = int(lines[1].split()[1].strip())
2482    xllcorner = float(lines[2].split()[1].strip())
2483    yllcorner = float(lines[3].split()[1].strip())
2484    cellsize = float(lines[4].split()[1].strip())
2485    NODATA_value = int(lines[5].split()[1].strip())
2486
2487    assert len(lines) == nrows + 6
2488
2489
2490    ##########################################
2491
2492
    if basename_out is None:
2494        netcdfname = root + '.dem'
2495    else:
2496        netcdfname = basename_out + '.dem'
2497
2498    if verbose: print 'Store to NetCDF file %s' %netcdfname
2499    # NetCDF file definition
2500    fid = NetCDFFile(netcdfname, 'w')
2501
2502    #Create new file
2503    fid.institution = 'Geoscience Australia'
2504    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2505                      'of spatial point data'
2506
2507    fid.ncols = ncols
2508    fid.nrows = nrows
2509    fid.xllcorner = xllcorner
2510    fid.yllcorner = yllcorner
2511    fid.cellsize = cellsize
2512    fid.NODATA_value = NODATA_value
2513
2514    fid.zone = zone
2515    fid.false_easting = false_easting
2516    fid.false_northing = false_northing
2517    fid.projection = projection
2518    fid.datum = datum
2519    fid.units = units
2520
2521
2522    # dimension definitions
2523    fid.createDimension('number_of_rows', nrows)
2524    fid.createDimension('number_of_columns', ncols)
2525
2526    # variable definitions
2527    fid.createVariable('elevation', Float, ('number_of_rows',
2528                                            'number_of_columns'))
2529
2530    # Get handles to the variables
2531    elevation = fid.variables['elevation']
2532
2533    #Store data
2534    n = len(lines[6:])
2535    for i, line in enumerate(lines[6:]):
2536        fields = line.split()
2537        if verbose and i%((n+10)/10)==0:
2538            print 'Processing row %d of %d' %(i, nrows)
2539
2540        elevation[i, :] = array([float(x) for x in fields])
2541
2542    fid.close()
2543
2544
2545
2546
2547
2548def ferret2sww(basename_in, basename_out = None,
2549               verbose = False,
2550               minlat = None, maxlat = None,
2551               minlon = None, maxlon = None,
2552               mint = None, maxt = None, mean_stage = 0,
2553               origin = None, zscale = 1,
2554               fail_on_NaN = True,
2555               NaN_filler = 0,
2556               elevation = None,
2557               inverted_bathymetry = True
2558               ): #FIXME: Bathymetry should be obtained
2559                                  #from MOST somehow.
2560                                  #Alternatively from elsewhere
2561                                  #or, as a last resort,
2562                                  #specified here.
2563                                  #The value of -100 will work
2564                                  #for the Wollongong tsunami
2565                                  #scenario but is very hacky
2566    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2567    sww format native to abstract_2d_finite_volumes.
2568
2569    Specify only basename_in and read files of the form
2570    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2571    relative height, x-velocity and y-velocity, respectively.
2572
2573    Also convert latitude and longitude to UTM. All coordinates are
2574    assumed to be given in the GDA94 datum.
2575
    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees
2579
2580    origin is a 3-tuple with geo referenced
2581    UTM coordinates (zone, easting, northing)
2582
2583    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2584    which means that longitude is the fastest
2585    varying dimension (row major order, so to speak)
2586
2587    ferret2sww uses grid points as vertices in a triangular grid
2588    counting vertices from lower left corner upwards, then right
2589    """
2590
2591    import os
2592    from Scientific.IO.NetCDF import NetCDFFile
2593    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2594    from Numeric import allclose, around
2595
2596    precision = Float
2597
2598    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2599
    if minlat is not None:
        assert -90 < minlat < 90 , msg
    if maxlat is not None:
        assert -90 < maxlat < 90 , msg
    if minlon is not None:
        assert -180 < minlon < 180 , msg
    if maxlon is not None:
        assert -180 < maxlon < 180 , msg
2608
2609
2610    #Get NetCDF data
2611    if verbose: print 'Reading files %s_*.nc' %basename_in
2612    #print "basename_in + '_ha.nc'",basename_in + '_ha.nc'
2613    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2614    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2615    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2616    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2617
2618    if basename_out is None:
2619        swwname = basename_in + '.sww'
2620    else:
2621        swwname = basename_out + '.sww'
2622
    # Get dimensions of file_h
2624    for dimension in file_h.dimensions.keys():
2625        if dimension[:3] == 'LON':
2626            dim_h_longitude = dimension
2627        if dimension[:3] == 'LAT':
2628            dim_h_latitude = dimension
2629        if dimension[:4] == 'TIME':
2630            dim_h_time = dimension
2631
2632#    print 'long:', dim_h_longitude
2633#    print 'lats:', dim_h_latitude
2634#    print 'times:', dim_h_time
2635
2636    times = file_h.variables[dim_h_time]
2637    latitudes = file_h.variables[dim_h_latitude]
2638    longitudes = file_h.variables[dim_h_longitude]
2639   
    # Get dimensions for file_e
2641    for dimension in file_e.dimensions.keys():
2642        if dimension[:3] == 'LON':
2643            dim_e_longitude = dimension
2644        if dimension[:3] == 'LAT':
2645            dim_e_latitude = dimension
2646
    # Get dimensions for file_u
2648    for dimension in file_u.dimensions.keys():
2649        if dimension[:3] == 'LON':
2650            dim_u_longitude = dimension
2651        if dimension[:3] == 'LAT':
2652            dim_u_latitude = dimension
2653        if dimension[:4] == 'TIME':
2654            dim_u_time = dimension
2655           
    # Get dimensions for file_v
2657    for dimension in file_v.dimensions.keys():
2658        if dimension[:3] == 'LON':
2659            dim_v_longitude = dimension
2660        if dimension[:3] == 'LAT':
2661            dim_v_latitude = dimension
2662        if dimension[:4] == 'TIME':
2663            dim_v_time = dimension
2664
2665
2666    #Precision used by most for lat/lon is 4 or 5 decimals
2667    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2668    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2669
2670    #Check that files are compatible
2671    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2672    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2673    assert allclose(latitudes, e_lat)
2674
2675    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2676    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2677    assert allclose(longitudes, e_lon)
2678
    if mint is None:
        jmin = 0
    else:
        jmin = searchsorted(times, mint)

    if maxt is None:
        jmax = len(times)
    else:
        jmax = searchsorted(times, maxt)

    if minlat is None:
        kmin = 0
    else:
        kmin = searchsorted(latitudes, minlat)

    if maxlat is None:
        kmax = len(latitudes)
    else:
        kmax = searchsorted(latitudes, maxlat)

    if minlon is None:
        lmin = 0
    else:
        lmin = searchsorted(longitudes, minlon)

    if maxlon is None:
        lmax = len(longitudes)
    else:
        lmax = searchsorted(longitudes, maxlon)
2708
2709#    print' j', jmin, jmax
2710    times = times[jmin:jmax]
2711    latitudes = latitudes[kmin:kmax]
2712    longitudes = longitudes[lmin:lmax]
2713
2714
2715    if verbose: print 'cropping'
2716    zname = 'ELEVATION'
2717
2718    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2719    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2720    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2721    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2722
2723    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2724    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2725    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2726    #        from Numeric import asarray
2727    #        elevations=elevations.tolist()
2728    #        elevations.reverse()
2729    #        elevations=asarray(elevations)
2730    #    else:
2731    #        from Numeric import asarray
2732    #        elevations=elevations.tolist()
2733    #        elevations.reverse()
2734    #        elevations=asarray(elevations)
2735    #        'print hmmm'
2736
2737
2738
2739    #Get missing values
2740    nan_ha = file_h.variables['HA'].missing_value[0]
2741    nan_ua = file_u.variables['UA'].missing_value[0]
2742    nan_va = file_v.variables['VA'].missing_value[0]
2743    if hasattr(file_e.variables[zname],'missing_value'):
2744        nan_e  = file_e.variables[zname].missing_value[0]
2745    else:
2746        nan_e = None
2747
2748    #Cleanup
2749    from Numeric import sometrue
2750
2751    missing = (amplitudes == nan_ha)
2752    if sometrue (missing):
2753        if fail_on_NaN:
2754            msg = 'NetCDFFile %s contains missing values'\
2755                  %(basename_in+'_ha.nc')
2756            raise DataMissingValuesError, msg
2757        else:
2758            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2759
2760    missing = (uspeed == nan_ua)
2761    if sometrue (missing):
2762        if fail_on_NaN:
2763            msg = 'NetCDFFile %s contains missing values'\
2764                  %(basename_in+'_ua.nc')
2765            raise DataMissingValuesError, msg
2766        else:
2767            uspeed = uspeed*(missing==0) + missing*NaN_filler
2768
2769    missing = (vspeed == nan_va)
2770    if sometrue (missing):
2771        if fail_on_NaN:
2772            msg = 'NetCDFFile %s contains missing values'\
2773                  %(basename_in+'_va.nc')
2774            raise DataMissingValuesError, msg
2775        else:
2776            vspeed = vspeed*(missing==0) + missing*NaN_filler
2777
2778
2779    missing = (elevations == nan_e)
2780    if sometrue (missing):
2781        if fail_on_NaN:
2782            msg = 'NetCDFFile %s contains missing values'\
2783                  %(basename_in+'_e.nc')
2784            raise DataMissingValuesError, msg
2785        else:
2786            elevations = elevations*(missing==0) + missing*NaN_filler
2787
2788    #######
2789
2790
2791
2792    number_of_times = times.shape[0]
2793    number_of_latitudes = latitudes.shape[0]
2794    number_of_longitudes = longitudes.shape[0]
2795
2796    assert amplitudes.shape[0] == number_of_times
2797    assert amplitudes.shape[1] == number_of_latitudes
2798    assert amplitudes.shape[2] == number_of_longitudes
2799
2800    if verbose:
2801        print '------------------------------------------------'
2802        print 'Statistics:'
2803        print '  Extent (lat/lon):'
2804        print '    lat in [%f, %f], len(lat) == %d'\
2805              %(min(latitudes.flat), max(latitudes.flat),
2806                len(latitudes.flat))
2807        print '    lon in [%f, %f], len(lon) == %d'\
2808              %(min(longitudes.flat), max(longitudes.flat),
2809                len(longitudes.flat))
2810        print '    t in [%f, %f], len(t) == %d'\
2811              %(min(times.flat), max(times.flat), len(times.flat))
2812
2813        q = amplitudes.flat
2814        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2816
2817        q = uspeed.flat
2818        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2820
2821        q = vspeed.flat
2822        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2824
2825        q = elevations.flat
2826        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2828
2829
2830    #print number_of_latitudes, number_of_longitudes
2831    number_of_points = number_of_latitudes*number_of_longitudes
2832    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2833
2834
2835    file_h.close()
2836    file_u.close()
2837    file_v.close()
2838    file_e.close()
2839
2840
2841    # NetCDF file definition
2842    outfile = NetCDFFile(swwname, 'w')
2843
2844    #Create new file
2845    outfile.institution = 'Geoscience Australia'
2846    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2847                          %(basename_in + '_ha.nc',
2848                            basename_in + '_ua.nc',
2849                            basename_in + '_va.nc',
2850                            basename_in + '_e.nc')
2851
2852
2853    #For sww compatibility
2854    outfile.smoothing = 'Yes'
2855    outfile.order = 1
2856
2857    #Start time in seconds since the epoch (midnight 1/1/1970)
2858    outfile.starttime = starttime = times[0]
2859    times = times - starttime  #Store relative times
2860
2861    # dimension definitions
2862    outfile.createDimension('number_of_volumes', number_of_volumes)
2863
2864    outfile.createDimension('number_of_vertices', 3)
2865    outfile.createDimension('number_of_points', number_of_points)
2866
2867
2869    outfile.createDimension('number_of_timesteps', len(times))
2870
2871    # variable definitions
2872    outfile.createVariable('x', precision, ('number_of_points',))
2873    outfile.createVariable('y', precision, ('number_of_points',))
2874    outfile.createVariable('elevation', precision, ('number_of_points',))
2875
2876    #FIXME: Backwards compatibility
2877    outfile.createVariable('z', precision, ('number_of_points',))
2878    #################################
2879
2880    outfile.createVariable('volumes', Int, ('number_of_volumes',
2881                                            'number_of_vertices'))
2882
2883    outfile.createVariable('time', precision,
2884                           ('number_of_timesteps',))
2885
2886    outfile.createVariable('stage', precision,
2887                           ('number_of_timesteps',
2888                            'number_of_points'))
2889
2890    outfile.createVariable('xmomentum', precision,
2891                           ('number_of_timesteps',
2892                            'number_of_points'))
2893
2894    outfile.createVariable('ymomentum', precision,
2895                           ('number_of_timesteps',
2896                            'number_of_points'))
2897
2898
2899    #Store
2900    from anuga.coordinate_transforms.redfearn import redfearn
2901    x = zeros(number_of_points, Float)  #Easting
2902    y = zeros(number_of_points, Float)  #Northing
2903
2904
2905    if verbose: print 'Making triangular grid'
2906    #Check zone boundaries
2907    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2908
2909    vertices = {}
2910    i = 0
2911    for k, lat in enumerate(latitudes):       #Y direction
2912        for l, lon in enumerate(longitudes):  #X direction
2913
2914            vertices[l,k] = i
2915
2916            zone, easting, northing = redfearn(lat,lon)
2917
            msg = 'Zone boundary crossed at longitude = %s' %lon
2919            #assert zone == refzone, msg
2920            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2921            x[i] = easting
2922            y[i] = northing
2923            i += 1
2924
2925
2926    #Construct 2 triangles per 'rectangular' element
2927    volumes = []
2928    for l in range(number_of_longitudes-1):    #X direction
2929        for k in range(number_of_latitudes-1): #Y direction
2930            v1 = vertices[l,k+1]
2931            v2 = vertices[l,k]
2932            v3 = vertices[l+1,k+1]
2933            v4 = vertices[l+1,k]
2934
2935            volumes.append([v1,v2,v3]) #Upper element
2936            volumes.append([v4,v3,v2]) #Lower element
2937
2938    volumes = array(volumes)
2939
    if origin is None:
2941        zone = refzone
2942        xllcorner = min(x)
2943        yllcorner = min(y)
2944    else:
2945        zone = origin[0]
2946        xllcorner = origin[1]
2947        yllcorner = origin[2]
2948
2949
2950    outfile.xllcorner = xllcorner
2951    outfile.yllcorner = yllcorner
2952    outfile.zone = zone
2953
2954
2955    if elevation is not None:
2956        z = elevation
2957    else:
2958        if inverted_bathymetry:
2959            z = -1*elevations
2960        else:
2961            z = elevations
2962    #FIXME: z should be obtained from MOST and passed in here
2963
2964    from Numeric import resize
2965    z = resize(z,outfile.variables['z'][:].shape)
2966    outfile.variables['x'][:] = x - xllcorner
2967    outfile.variables['y'][:] = y - yllcorner
    outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
2969    outfile.variables['elevation'][:] = z
2970    outfile.variables['time'][:] = times   #Store time relative
2971    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
2972
2973
2974
2975    #Time stepping
2976    stage = outfile.variables['stage']
2977    xmomentum = outfile.variables['xmomentum']
2978    ymomentum = outfile.variables['ymomentum']
2979
2980    if verbose: print 'Converting quantities'
2981    n = len(times)
2982    for j in range(n):
2983        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2984        i = 0
2985        for k in range(number_of_latitudes):      #Y direction
2986            for l in range(number_of_longitudes): #X direction
2987                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2988                stage[j,i] = w
2989                h = w - z[i]
2990                xmomentum[j,i] = uspeed[j,k,l]/100*h
2991                ymomentum[j,i] = vspeed[j,k,l]/100*h
2992                i += 1
2993
2994    #outfile.close()
2995
2996    #FIXME: Refactor using code from file_function.statistics
2997    #Something like print swwstats(swwname)
2998    if verbose:
2999        x = outfile.variables['x'][:]
3000        y = outfile.variables['y'][:]
3001        times = outfile.variables['time'][:]
3002        print '------------------------------------------------'
3003        print 'Statistics of output file:'
3004        print '  Name: %s' %swwname
3005        print '  Reference:'
3006        print '    Lower left corner: [%f, %f]'\
3007              %(xllcorner, yllcorner)
3008        print '    Start time: %f' %starttime
3009        print '  Extent:'
3010        print '    x [m] in [%f, %f], len(x) == %d'\
3011              %(min(x.flat), max(x.flat), len(x.flat))
3012        print '    y [m] in [%f, %f], len(y) == %d'\
3013              %(min(y.flat), max(y.flat), len(y.flat))
3014        print '    t [s] in [%f, %f], len(t) == %d'\
3015              %(min(times), max(times), len(times))
3016        print '  Quantities [SI units]:'
3017        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
3018            q = outfile.variables[name][:].flat
3019            print '    %s in [%f, %f]' %(name, min(q), max(q))
3020
3021
3022
3023    outfile.close()
3024
3025
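# Usage sketch for ferret2sww (basename hypothetical). This expects the
# four MOST files most_ha.nc, most_ua.nc, most_va.nc and most_e.nc and
# writes most.sww cropped to the given lat/lon window:
#
#   ferret2sww('most',
#              minlat=-21.0, maxlat=-20.0,
#              minlon=114.0, maxlon=115.0,
#              mean_stage=0.0, zscale=1, verbose=True)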
3026
3027
3028
3029def timefile2netcdf(filename, quantity_names = None):
3030    """Template for converting typical text files with time series to
3031    NetCDF tms file.
3032
3033
    The file format is assumed to be two fields separated by a comma:
3035
3036        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3037
    E.g.
3039
3040      31/08/04 00:00:00, 1.328223 0 0
3041      31/08/04 00:15:00, 1.292912 0 0
3042
3043    will provide a time dependent function f(t) with three attributes
3044
    filename is assumed to be the rootname with extensions .txt (input) and .tms (output)
3046    """
3047
3048    import time, calendar
3049    from Numeric import array
3050    from anuga.config import time_format
3051    from anuga.utilities.numerical_tools import ensure_numeric
3052
3053
3054
3055    fid = open(filename + '.txt')
3056    line = fid.readline()
3057    fid.close()
3058
3059    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3061    assert len(fields) == 2, msg
3062
3063    try:
3064        starttime = calendar.timegm(time.strptime(fields[0], time_format))
3065    except ValueError:
3066        msg = 'First field in file %s must be' %filename
3067        msg += ' date-time with format %s.\n' %time_format
3068        msg += 'I got %s instead.' %fields[0]
3069        raise DataTimeError, msg
3070
3071
3072    #Split values
3073    values = []
3074    for value in fields[1].split():
3075        values.append(float(value))
3076
3077    q = ensure_numeric(values)
3078
3079    msg = 'ERROR: File must contain at least one independent value'
3080    assert len(q.shape) == 1, msg
3081
3082
3083
3084    #Read times proper
3085    from Numeric import zeros, Float, alltrue
3086    from anuga.config import time_format
3087    import time, calendar
3088
3089    fid = open(filename + '.txt')
3090    lines = fid.readlines()
3091    fid.close()
3092
3093    N = len(lines)
3094    d = len(q)
3095
3096    T = zeros(N, Float)       #Time
3097    Q = zeros((N, d), Float)  #Values
3098
3099    for i, line in enumerate(lines):
3100        fields = line.split(',')
3101        realtime = calendar.timegm(time.strptime(fields[0], time_format))
3102
3103        T[i] = realtime - starttime
3104
3105        for j, value in enumerate(fields[1].split()):
3106            Q[i, j] = float(value)
3107
    msg = 'File %s must list time as a monotonically ' %filename
3109    msg += 'increasing sequence'
3110    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3111
3112
3113    #Create NetCDF file
3114    from Scientific.IO.NetCDF import NetCDFFile
3115
3116    fid = NetCDFFile(filename + '.tms', 'w')
3117
3118
3119    fid.institution = 'Geoscience Australia'
3120    fid.description = 'Time series'
3121
3122
3123    #Reference point
3124    #Start time in seconds since the epoch (midnight 1/1/1970)
3125    #FIXME: Use Georef
3126    fid.starttime = starttime
3127
3128
3129    # dimension definitions
3130    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3131    #fid.createDimension('number_of_vertices', 3)
3132
3133
3134    fid.createDimension('number_of_timesteps', len(T))
3135
3136    fid.createVariable('time', Float, ('number_of_timesteps',))
3137
3138    fid.variables['time'][:] = T
3139
3140    for i in range(Q.shape[1]):
3141        try:
3142            name = quantity_names[i]
        except (TypeError, IndexError):
3144            name = 'Attribute%d'%i
3145
3146        fid.createVariable(name, Float, ('number_of_timesteps',))
3147        fid.variables[name][:] = Q[:,i]
3148
3149    fid.close()
3150
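# Usage sketch for timefile2netcdf (filename hypothetical). Given
# gauge.txt with lines like '31/08/04 00:00:00, 1.328223 0 0', this
# writes gauge.tms with one NetCDF variable per value column:
#
#   timefile2netcdf('gauge',
#                   quantity_names=['stage', 'xmomentum', 'ymomentum'])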
3151
3152def extent_sww(file_name):
3153    """
    Read in an sww file and return its extent.

    Input:
    file_name - the sww file

    Output:
    A list [min(x), max(x), min(y), max(y), min(stage), max(stage)]
    where x and y are the vertex coordinates and stage is taken over
    all timesteps and vertices.
    """
3166
3167
3168    from Scientific.IO.NetCDF import NetCDFFile
3169
3170    #Check contents
3171    #Get NetCDF
3172    fid = NetCDFFile(file_name, 'r')
3173
3174    # Get the variables
3175    x = fid.variables['x'][:]
3176    y = fid.variables['y'][:]
3177    stage = fid.variables['stage'][:]
    #print "stage", stage
    #print "stage.shape", stage.shape
    #print "min(stage.flat), max(stage.flat)", min(stage.flat), max(stage.flat)
    #print "min(stage)", min(stage)
3182
3183    fid.close()
3184
3185    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3186
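# Usage sketch for extent_sww (filename hypothetical):
#
#   xmin, xmax, ymin, ymax, smin, smax = extent_sww('runup.sww')
#   print 'x in [%f, %f], stage in [%f, %f]' %(xmin, xmax, smin, smax)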
3187
3188def sww2domain(filename,boundary=None,t=None,\
3189               fail_if_NaN=True,NaN_filler=0\
3190               ,verbose = False,very_verbose = False):
3191    """
3192    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3193
3194    Boundary is not recommended if domain.smooth is not selected, as it
3195    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
3197    give a different final boundary, or crash.
3198    """
3199    NaN=9.969209968386869e+036
3200    #initialise NaN.
3201
3202    from Scientific.IO.NetCDF import NetCDFFile
3203    from shallow_water import Domain
3204    from Numeric import asarray, transpose, resize
3205
3206    if verbose: print 'Reading from ', filename
3207    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3208    time = fid.variables['time']       #Timesteps
3209    if t is None:
3210        t = time[-1]
3211    time_interp = get_time_interp(time,t)
3212
3213    # Get the variables as Numeric arrays
3214    x = fid.variables['x'][:]             #x-coordinates of vertices
3215    y = fid.variables['y'][:]             #y-coordinates of vertices
3216    elevation = fid.variables['elevation']     #Elevation
3217    stage = fid.variables['stage']     #Water level
3218    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3219    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3220
3221    starttime = fid.starttime[0]
3222    volumes = fid.variables['volumes'][:] #Connectivity
3223    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3224
3225    conserved_quantities = []
3226    interpolated_quantities = {}
3227    other_quantities = []
3228
3229    # get geo_reference
3230    #sww files don't have to have a geo_ref
3231    try:
3232        geo_reference = Geo_reference(NetCDFObject=fid)
3233    except: #AttributeError, e:
3234        geo_reference = None
3235
3236    if verbose: print '    getting quantities'
3237    for quantity in fid.variables.keys():
3238        dimensions = fid.variables[quantity].dimensions
3239        if 'number_of_timesteps' in dimensions:
3240            conserved_quantities.append(quantity)
3241            interpolated_quantities[quantity]=\
3242                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3243        else: other_quantities.append(quantity)
3244
3245    other_quantities.remove('x')
3246    other_quantities.remove('y')
3247    other_quantities.remove('z')
3248    other_quantities.remove('volumes')
3249
3250    conserved_quantities.remove('time')
3251
3252    if verbose: print '    building domain'
3253    #    From domain.Domain:
3254    #    domain = Domain(coordinates, volumes,\
3255    #                    conserved_quantities = conserved_quantities,\
3256    #                    other_quantities = other_quantities,zone=zone,\
3257    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3258
3259    #   From shallow_water.Domain:
3260    coordinates=coordinates.tolist()
3261    volumes=volumes.tolist()
3262    #FIXME:should this be in mesh?(peter row)
3263    if fid.smoothing == 'Yes': unique = False
3264    else: unique = True
3265    if unique:
3266        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3267
3268
3269    try:
3270        domain = Domain(coordinates, volumes, boundary)
3271    except AssertionError, e:
3272        fid.close()
3273        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3274        raise DataDomainError, msg
3275
3276    if not boundary is None:
3277        domain.boundary = boundary
3278
3279    domain.geo_reference = geo_reference
3280
3281    domain.starttime=float(starttime)+float(t)
3282    domain.time=0.0
3283
3284    for quantity in other_quantities:
3285        try:
3286            NaN = fid.variables[quantity].missing_value
3287        except:
3288            pass #quantity has no missing_value number
3289        X = fid.variables[quantity][:]
3290        if very_verbose:
3291            print '       ',quantity
3292            print '        NaN =',NaN
3293            print '        max(X)'
3294            print '       ',max(X)
3295            print '        max(X)==NaN'
3296            print '       ',max(X)==NaN
3297            print ''
3298        if (max(X)==NaN) or (min(X)==NaN):
3299            if fail_if_NaN:
3300                msg = 'quantity "%s" contains no_data entry'%quantity
3301                raise DataMissingValuesError, msg
3302            else:
                data = (X != NaN)
3304                X = (X*data)+(data==0)*NaN_filler
3305        if unique:
3306            X = resize(X,(len(X)/3,3))
3307        domain.set_quantity(quantity,X)
3308    #
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except AttributeError:
            NaN = None #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ', quantity
            print '        NaN =', NaN
            print '        max(X)'
            print '       ', max(X)
            print '        max(X)==NaN'
            print '       ', max(X)==NaN
            print ''
        if NaN is not None and (max(X)==NaN or min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' %quantity
                raise DataMissingValuesError, msg
            else:
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            X = resize(X, (X.shape[0]/3, 3))
        domain.set_quantity(quantity, X)
3333
3334    fid.close()
3335    return domain
3336
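# A minimal sketch (not part of the original module) of the masking
# arithmetic used above to replace no_data entries: 'data' is 1 where a
# value is valid and 0 where it equals the sentinel, so valid values are
# kept and missing ones become NaN_filler. The sentinel 9999.0 and the
# filler 0.0 are illustrative assumptions.
#
#   >>> from Numeric import array
#   >>> X = array([1.0, 9999.0, 3.0])
#   >>> data = (X != 9999.0)            # -> [1, 0, 1]
#   >>> (X*data) + (data == 0)*0.0      # -> [1.0, 0.0, 3.0]
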
def interpolated_quantity(saved_quantity, time_interp):
    """Given an (index, ratio) pair, interpolate the saved quantity with
    respect to time and return the vector of interpolated values.
    """
    index, ratio = time_interp
    Q = saved_quantity
    if ratio > 0:
        q = (1-ratio)*Q[index] + ratio*Q[index+1]
    else:
        q = Q[index]
    return q
3348
def get_time_interp(time, t=None):
    """Find the index and ratio for time interpolation.
    Borrowed from previous abstract_2d_finite_volumes code.
    """
    if t is None:
        t = time[-1]
        index = -1
        ratio = 0.
    else:
        T = time
        tau = t
        index = 0
        msg = 'Time interval derived from file %s [%s:%s]'\
            %('FIXMEfilename', T[0], T[-1])
        msg += ' does not match model time: %s' %tau
        if tau < time[0]: raise DataTimeError, msg
        if tau > time[-1]: raise DataTimeError, msg
        while tau > time[index]: index += 1
        while tau < time[index]: index -= 1
        if tau == time[index]:
            #Protect against case where tau == time[-1] (last time)
            # - also works in general when tau == time[i]
            ratio = 0
        else:
            #t is now between index and index+1
            ratio = (tau - time[index])/(time[index+1] - time[index])
    return (index, ratio)
3375
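# Illustrative sketch (inputs assumed) of how the two helpers above
# combine: get_time_interp locates t between two stored timesteps and
# interpolated_quantity blends the corresponding slices.
#
#   >>> from Numeric import array
#   >>> time = array([0.0, 10.0, 20.0])
#   >>> Q = array([[0.0, 0.0], [10.0, 20.0], [20.0, 40.0]])
#   >>> index, ratio = get_time_interp(time, t=15.0)   # -> (1, 0.5)
#   >>> interpolated_quantity(Q, (index, ratio))       # -> [15.0, 30.0]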
3376
def weed(coordinates, volumes, boundary=None):
    #Accept Numeric arrays as well as lists
    if not isinstance(coordinates, list):
        coordinates = coordinates.tolist()
    if not isinstance(volumes, list):
        volumes = volumes.tolist()

    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point_dict.has_key(point):
            #duplicate point - all references to i will be redirected
            same_point[i] = point
        else:
            point_dict[point] = i
            same_point[i] = point
3395
    coordinates = []
    i = 0
    for point in point_dict.keys():
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1
3403
3404
3405    for volume in volumes:
3406        for i in range(len(volume)):
3407            index = volume[i]
3408            if index>-1:
3409                volume[i]=point_dict[same_point[index]]
3410
    new_boundary = {}
    if boundary is not None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME: should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')? (peter row)

            if new_boundary.has_key((point0, point1)):
                pass #keep the existing label (see FIXME above)
            elif new_boundary.has_key((point1, point0)):
                pass #keep the existing label (see FIXME above)
            else:
                new_boundary[(point0, point1)] = label

        boundary = new_boundary
3430
3431    return coordinates,volumes,boundary
3432
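# Illustrative sketch (inputs assumed) of what weed does: vertex 2 below
# duplicates vertex 0, so the duplicate is removed and the triangle is
# renumbered to reference the surviving points.
#
#   >>> coords = [[0.0, 0.0], [1.0, 0.0], [0.0, 0.0], [0.0, 1.0]]
#   >>> vols = [[1, 2, 3]]
#   >>> coords, vols, b = weed(coords, vols)
#   >>> len(coords)   # vols now indexes the deduplicated points
#   3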
3433
3434def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3435                 verbose=False):
    """Read a Digital Elevation Model from the following NetCDF format (.dem)
3437
3438    Example:
3439
3440    ncols         3121
3441    nrows         1800
3442    xllcorner     722000
3443    yllcorner     5893000
3444    cellsize      25
3445    NODATA_value  -9999
3446    138.3698 137.4194 136.5062 135.5558 ..........
3447
3448    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3449    """
3450
3451    import os
3452    from Scientific.IO.NetCDF import NetCDFFile
3453    from Numeric import Float, zeros, sum, reshape, equal
3454
3455    root = basename_in
3456    inname = root + '.dem'
3457
3458    #Open existing netcdf file to read
3459    infile = NetCDFFile(inname, 'r')
3460    if verbose: print 'Reading DEM from %s' %inname
3461
3462    #Read metadata
3463    ncols = infile.ncols[0]
3464    nrows = infile.nrows[0]
3465    xllcorner = infile.xllcorner[0]
3466    yllcorner = infile.yllcorner[0]
3467    cellsize = infile.cellsize[0]
3468    NODATA_value = infile.NODATA_value[0]
3469    zone = infile.zone[0]
3470    false_easting = infile.false_easting[0]
3471    false_northing = infile.false_northing[0]
3472    projection = infile.projection
3473    datum = infile.datum
3474    units = infile.units
3475
3476    dem_elevation = infile.variables['elevation']
3477
3478    #Get output file name
    if basename_out is None:
3480        outname = root + '_' + repr(cellsize_new) + '.dem'
3481    else:
3482        outname = basename_out + '.dem'
3483
3484    if verbose: print 'Write decimated NetCDF file to %s' %outname
3485
3486    #Determine some dimensions for decimated grid
3487    (nrows_stencil, ncols_stencil) = stencil.shape
3488    x_offset = ncols_stencil / 2
3489    y_offset = nrows_stencil / 2
3490    cellsize_ratio = int(cellsize_new / cellsize)
3491    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3492    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3493
3494    #Open netcdf file for output
3495    outfile = NetCDFFile(outname, 'w')
3496
3497    #Create new file
3498    outfile.institution = 'Geoscience Australia'
3499    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3500                           'of spatial point data'
3501    #Georeferencing
3502    outfile.zone = zone
3503    outfile.projection = projection
3504    outfile.datum = datum
3505    outfile.units = units
3506
3507    outfile.cellsize = cellsize_new
3508    outfile.NODATA_value = NODATA_value
3509    outfile.false_easting = false_easting
3510    outfile.false_northing = false_northing
3511
3512    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3513    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3514    outfile.ncols = ncols_new
3515    outfile.nrows = nrows_new
3516
3517    # dimension definition
3518    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3519
3520    # variable definition
3521    outfile.createVariable('elevation', Float, ('number_of_points',))
3522
3523    # Get handle to the variable
3524    elevation = outfile.variables['elevation']
3525
3526    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3527
3528    #Store data
3529    global_index = 0
3530    for i in range(nrows_new):
3531        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3532        lower_index = global_index
3533        telev =  zeros(ncols_new, Float)
3534        local_index = 0
3535        trow = i * cellsize_ratio
3536
3537        for j in range(ncols_new):
3538            tcol = j * cellsize_ratio
3539            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3540
3541            #if dem contains 1 or more NODATA_values set value in
3542            #decimated dem to NODATA_value, else compute decimated
3543            #value using stencil
3544            if sum(sum(equal(tmp, NODATA_value))) > 0:
3545                telev[local_index] = NODATA_value
3546            else:
3547                telev[local_index] = sum(sum(tmp * stencil))
3548
3549            global_index += 1
3550            local_index += 1
3551
3552        upper_index = global_index
3553
3554        elevation[lower_index:upper_index] = telev
3555
3556    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3557
3558    infile.close()
3559    outfile.close()
3560
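# Sketch of a typical decimate_dem call (file names assumed): decimate a
# 25m DEM to 100m using a uniform 4x4 averaging stencil. The stencil
# weights should sum to 1 so the result remains an average of the fine
# grid cells.
#
#   from Numeric import ones, Float
#   stencil = ones((4, 4), Float)/16.
#   decimate_dem('fine_area', stencil, 100, basename_out='coarse_area',
#                verbose=True)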
3561
3562
3563
def tsh2sww(filename, verbose=False): #test_tsh2sww
    """
    Convert a tsh/msh mesh file to an sww file, mainly as a quick
    check that the mesh 'looks' good.
    """
3568
3569    from shallow_water import Domain
3570    from anuga.abstract_2d_finite_volumes.pmesh2domain import pmesh_to_domain_instance
3571    import time, os
3572    #from data_manager import get_dataobject
3573    from os import sep, path
3574    from anuga.utilities.numerical_tools import mean
3575
    if verbose: print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose: print "Number of triangles = ", len(domain)

    domain.smooth = True
    domain.format = 'sww'   #Native netcdf visualisation format
    file_path, filename = path.split(filename)
    filename, ext = path.splitext(filename)
    domain.set_name(filename)
    domain.reduction = mean
    if verbose: print "file_path", file_path
    if file_path == "": file_path = "."
    domain.set_datadir(file_path)

    if verbose:
        print "Output written to " + domain.get_datadir() + sep + \
              domain.get_name() + "." + domain.format
3593    sww = get_dataobject(domain)
3594    sww.store_connectivity()
3595    sww.store_timestep('stage')
3596
3597
3598def asc_csiro2sww(bath_dir,
3599                  elevation_dir,
3600                  ucur_dir,
3601                  vcur_dir,
3602                  sww_file,
3603                  minlat = None, maxlat = None,
3604                  minlon = None, maxlon = None,
3605                  zscale=1,
3606                  mean_stage = 0,
3607                  fail_on_NaN = True,
3608                  elevation_NaN_filler = 0,
3609                  bath_prefix='ba',
3610                  elevation_prefix='el',
3611                  verbose=False):
    """
    Produce an sww boundary file from ESRI ascii data supplied by CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    Four types of information are expected, each in its own directory:
    bathymetry, elevation, u velocity and v velocity.

    Assumptions:
    All files are in ESRI ascii format.
    The metadata of all the files is the same.
    Each type is in a separate directory.
    There is one bath file, with extension .000.
    The time period is less than 24hrs and uniform.
    """
3633    from Scientific.IO.NetCDF import NetCDFFile
3634
3635    from anuga.coordinate_transforms.redfearn import redfearn
3636
    precision = Float # So the precision can be changed in one place
3638
    # Go into the bath dir and load the only file
3640    bath_files = os.listdir(bath_dir)
3641    #print "bath_files",bath_files
3642
3643    #fixme: make more general?
3644    bath_file = bath_files[0]
3645    bath_dir_file =  bath_dir + os.sep + bath_file
3646    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3647    #print "bath_metadata",bath_metadata
3648    #print "bath_grid",bath_grid
3649
3650    #Use the date.time of the bath file as a basis for
3651    #the start time for other files
3652    base_start = bath_file[-12:]
3653
3654    #go into the elevation dir and load the 000 file
3655    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3656                         + base_start
3657    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
3658    #print "elevation_dir_file",elevation_dir_file
3659    #print "elevation_grid", elevation_grid
3660
3661    elevation_files = os.listdir(elevation_dir)
3662    ucur_files = os.listdir(ucur_dir)
3663    vcur_files = os.listdir(vcur_dir)
3664
    # The first elevation file should be the
    # file with the same base name as the bath data
    assert elevation_files[0] == elevation_prefix + base_start
3670
3671    #assert bath_metadata == elevation_metadata
3672
3673
3674
3675    number_of_latitudes = bath_grid.shape[0]
3676    number_of_longitudes = bath_grid.shape[1]
3677    #number_of_times = len(os.listdir(elevation_dir))
3678    #number_of_points = number_of_latitudes*number_of_longitudes
3679    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3680
3681    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3682                  for x in range(number_of_longitudes)]
3683    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3684                 for y in range(number_of_latitudes)]
3685
    # Reverse order of lat, so the first lat represents the first grid row
3687    latitudes.reverse()
3688
3689    #print "latitudes - before _get_min_max_indexes",latitudes
3690    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
3691                                                 minlat=minlat, maxlat=maxlat,
3692                                                 minlon=minlon, maxlon=maxlon)
3693
3694
3695    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3696    #print "bath_grid",bath_grid
3697    latitudes = latitudes[kmin:kmax]
3698    longitudes = longitudes[lmin:lmax]
3699    number_of_latitudes = len(latitudes)
3700    number_of_longitudes = len(longitudes)
3701    number_of_times = len(os.listdir(elevation_dir))
3702    number_of_points = number_of_latitudes*number_of_longitudes
3703    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3704    #print "number_of_points",number_of_points
3705
3706    #Work out the times
3707    if len(elevation_files) > 1:
3708        # Assume: The time period is less than 24hrs.
3709        time_period = (int(elevation_files[1][-3:]) - \
3710                      int(elevation_files[0][-3:]))*60*60
3711        times = [x*time_period for x in range(len(elevation_files))]
3712    else:
3713        times = [0.0]
3714    #print "times", times
3715    #print "number_of_latitudes", number_of_latitudes
3716    #print "number_of_longitudes", number_of_longitudes
3717    #print "number_of_times", number_of_times
3718    #print "latitudes", latitudes
3719    #print "longitudes", longitudes
3720
3721
3722    if verbose:
3723        print '------------------------------------------------'
3724        print 'Statistics:'
3725        print '  Extent (lat/lon):'
3726        print '    lat in [%f, %f], len(lat) == %d'\
3727              %(min(latitudes), max(latitudes),
3728                len(latitudes))
3729        print '    lon in [%f, %f], len(lon) == %d'\
3730              %(min(longitudes), max(longitudes),
3731                len(longitudes))
3732        print '    t in [%f, %f], len(t) == %d'\
3733              %(min(times), max(times), len(times))
3734
3735    ######### WRITE THE SWW FILE #############
3736    # NetCDF file definition
3737    outfile = NetCDFFile(sww_file, 'w')
3738
3739    #Create new file
3740    outfile.institution = 'Geoscience Australia'
3741    outfile.description = 'Converted from XXX'
3742
3743
3744    #For sww compatibility
3745    outfile.smoothing = 'Yes'
3746    outfile.order = 1
3747
3748    #Start time in seconds since the epoch (midnight 1/1/1970)
3749    outfile.starttime = starttime = times[0]
3750
3751
3752    # dimension definitions
3753    outfile.createDimension('number_of_volumes', number_of_volumes)
3754
3755    outfile.createDimension('number_of_vertices', 3)
3756    outfile.createDimension('number_of_points', number_of_points)
3757    outfile.createDimension('number_of_timesteps', number_of_times)
3758
3759    # variable definitions
3760    outfile.createVariable('x', precision, ('number_of_points',))
3761    outfile.createVariable('y', precision, ('number_of_points',))
3762    outfile.createVariable('elevation', precision, ('number_of_points',))
3763
3764    #FIXME: Backwards compatibility
3765    outfile.createVariable('z', precision, ('number_of_points',))
3766    #################################
3767
3768    outfile.createVariable('volumes', Int, ('number_of_volumes',
3769                                            'number_of_vertices'))
3770
3771    outfile.createVariable('time', precision,
3772                           ('number_of_timesteps',))
3773
3774    outfile.createVariable('stage', precision,
3775                           ('number_of_timesteps',
3776                            'number_of_points'))
3777
3778    outfile.createVariable('xmomentum', precision,
3779                           ('number_of_timesteps',
3780                            'number_of_points'))
3781
3782    outfile.createVariable('ymomentum', precision,
3783                           ('number_of_timesteps',
3784                            'number_of_points'))
3785
3786    #Store
3787    from anuga.coordinate_transforms.redfearn import redfearn
3788    x = zeros(number_of_points, Float)  #Easting
3789    y = zeros(number_of_points, Float)  #Northing
3790
3791    if verbose: print 'Making triangular grid'
3792    #Get zone of 1st point.
3793    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3794
3795    vertices = {}
3796    i = 0
3797    for k, lat in enumerate(latitudes):
3798        for l, lon in enumerate(longitudes):
3799
3800            vertices[l,k] = i
3801
3802            zone, easting, northing = redfearn(lat,lon)
3803
            msg = 'Zone boundary crossed at longitude = %s' %lon
3805            #assert zone == refzone, msg
3806            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3807            x[i] = easting
3808            y[i] = northing
3809            i += 1
3810
3811
3812    #Construct 2 triangles per 'rectangular' element
3813    volumes = []
3814    for l in range(number_of_longitudes-1):    #X direction
3815        for k in range(number_of_latitudes-1): #Y direction
3816            v1 = vertices[l,k+1]
3817            v2 = vertices[l,k]
3818            v3 = vertices[l+1,k+1]
3819            v4 = vertices[l+1,k]
3820
            #Note, this is different to the ferret2sww code
3822            #since the order of the lats is reversed.
3823            volumes.append([v1,v3,v2]) #Upper element
3824            volumes.append([v4,v2,v3]) #Lower element
3825
3826    volumes = array(volumes)
3827
3828    geo_ref = Geo_reference(refzone,min(x),min(y))
3829    geo_ref.write_NetCDF(outfile)
3830
    # This would put the geo ref in the middle
    #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2.0, (max(y)+min(y))/2.)
3833
3834
    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (x/y):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x), len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y), len(y))
        print 'geo_ref: ', geo_ref
3846
3847    z = resize(bath_grid,outfile.variables['z'][:].shape)
3848    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3849    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3850    outfile.variables['z'][:] = z
3851    outfile.variables['elevation'][:] = z  #FIXME HACK
3852    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3853
3854    # do this to create an ok sww file.
3855    #outfile.variables['time'][:] = [0]   #Store time relative
3856    #outfile.variables['stage'] = z
3857    # put the next line up in the code after outfile.order = 1
3858    #number_of_times = 1
3859
3860    stage = outfile.variables['stage']
3861    xmomentum = outfile.variables['xmomentum']
3862    ymomentum = outfile.variables['ymomentum']
3863
3864    outfile.variables['time'][:] = times   #Store time relative
3865
3866    if verbose: print 'Converting quantities'
3867    n = number_of_times
3868    for j in range(number_of_times):
3869        # load in files
3870        elevation_meta, elevation_grid = \
3871           _read_asc(elevation_dir + os.sep + elevation_files[j])
3872
3873        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3874        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3875
3876        #print "elevation_grid",elevation_grid
3877        #cut matrix to desired size
3878        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3879        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3880        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3881        #print "elevation_grid",elevation_grid
3882        # handle missing values
3883        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue(missing):
3885            if fail_on_NaN:
3886                msg = 'File %s contains missing values'\
3887                      %(elevation_files[j])
3888                raise DataMissingValuesError, msg
3889            else:
3890                elevation_grid = elevation_grid*(missing==0) + \
3891                                 missing*elevation_NaN_filler
3892
3893
3894        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3895        i = 0
3896        for k in range(number_of_latitudes):      #Y direction
3897            for l in range(number_of_longitudes): #X direction
3898                w = zscale*elevation_grid[k,l] + mean_stage
3899                stage[j,i] = w
3900                h = w - z[i]
3901                xmomentum[j,i] = u_momentum_grid[k,l]*h
3902                ymomentum[j,i] = v_momentum_grid[k,l]*h
3903                i += 1
3904    outfile.close()
3905
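# Sketch of a typical asc_csiro2sww call (directory and file names are
# assumed for illustration). Each directory holds one ESRI ascii grid per
# timestep; the 'ba' and 'el' prefixes follow the defaults above.
#
#   asc_csiro2sww('bathys', 'elevations', 'ucur', 'vcur', 'outfile.sww',
#                 minlat=-38, maxlat=-35, minlon=148, maxlon=151,
#                 verbose=True)
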
3906def _get_min_max_indexes(latitudes,longitudes,
3907                        minlat=None, maxlat=None,
3908                        minlon=None, maxlon=None):
    """
    Return max, min indexes (for slicing) of the lat and long arrays,
    such that the slice covers the area specified by min/max lat/long.

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assume latitudes and longitudes are sorted:
    long - from low to high (west to east, eg 148 - 151)
    lat - from high to low (north to south, eg -35 - -38)
    """
3922
3923    # reverse order of lat, so it's in ascending order
3924    latitudes.reverse()
3925    largest_lat_index = len(latitudes)-1
3926    #Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0


    if maxlat is None:
        lat_max_index = largest_lat_index #len(latitudes)
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)
3953
3954    #Take into account that the latitude list was reversed
3955    latitudes.reverse() # Python passes by reference, need to swap back
3956    lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3957                                   largest_lat_index - lat_min_index
3958    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3959    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3960
3961    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3962
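# Worked example (values assumed) of the helper above: latitudes run
# north to south, longitudes west to east. The returned slices cover the
# requested window, extended by at most one grid cell where needed:
#
#   >>> _get_min_max_indexes([-35., -36., -37., -38.],
#   ...                      [148., 149., 150., 151.],
#   ...                      minlat=-36.5, maxlat=-35.5,
#   ...                      minlon=148.5, maxlon=150.5)
#   (0, 3, 0, 4)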
3963
3964def _read_asc(filename, verbose=False):
3965    """Read esri file from the following ASCII format (.asc)
3966
3967    Example:
3968
3969    ncols         3121
3970    nrows         1800
3971    xllcorner     722000
3972    yllcorner     5893000
3973    cellsize      25
3974    NODATA_value  -9999
3975    138.3698 137.4194 136.5062 135.5558 ..........
3976
3977    """
3978
3979    datafile = open(filename)
3980
3981    if verbose: print 'Reading DEM from %s' %(filename)
3982    lines = datafile.readlines()
3983    datafile.close()
3984
    if verbose: print 'Got', len(lines), 'lines'
3986
3987    ncols = int(lines.pop(0).split()[1].strip())
3988    nrows = int(lines.pop(0).split()[1].strip())
3989    xllcorner = float(lines.pop(0).split()[1].strip())
3990    yllcorner = float(lines.pop(0).split()[1].strip())
3991    cellsize = float(lines.pop(0).split()[1].strip())
3992    NODATA_value = float(lines.pop(0).split()[1].strip())
3993
3994    assert len(lines) == nrows
3995
3996    #Store data
3997    grid = []
3998
    for line in lines:
4001        cells = line.split()
4002        assert len(cells) == ncols
4003        grid.append(array([float(x) for x in cells]))
4004    grid = array(grid)
4005
4006    return {'xllcorner':xllcorner,
4007            'yllcorner':yllcorner,
4008            'cellsize':cellsize,
4009            'NODATA_value':NODATA_value}, grid
4010
4011
4012
####  URS 2 SWW  ###
4014
4015lon_name = 'LON'
4016lat_name = 'LAT'
4017time_name = 'TIME'
precision = Float # So the precision can be changed in one place
4019class Write_nc:
4020    """
4021    Write an nc file.
4022   
4023    Note, this should be checked to meet cdc netcdf conventions for gridded
4024    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4025   
4026    """
4027    def __init__(self,
4028                 quantity_name,
4029                 file_name,
4030                 time_step_count,
4031                 time_step,
4032                 lon,
4033                 lat):
        """
        time_step_count is the number of time steps.
        time_step is the time step size.

        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
        """
4040        self.quantity_name = quantity_name
        quantity_units = {'HA':'METERS',
                          'UA':'METERS/SECOND',
                          'VA':'METERS/SECOND'}
4044       
4045        #self.file_name = file_name
4046        self.time_step_count = time_step_count
4047        self.time_step = time_step
4048
4049        # NetCDF file definition
4050        self.outfile = NetCDFFile(file_name, 'w')
4051        outfile = self.outfile       
4052
4053        #Create new file
4054        nc_lon_lat_header(outfile, lon, lat)
4055   
4056        # TIME
4057        outfile.createDimension(time_name, None)
4058        outfile.createVariable(time_name, precision, (time_name,))
4059
4060        #QUANTITY
4061        outfile.createVariable(self.quantity_name, precision,
4062                               (time_name, lat_name, lon_name))
4063        outfile.variables[self.quantity_name].missing_value=-1.e+034
4064        outfile.variables[self.quantity_name].units= \
4065                                 quantity_units[self.quantity_name]
4066        outfile.variables[lon_name][:]= ensure_numeric(lon)
4067        outfile.variables[lat_name][:]= ensure_numeric(lat)
4068
4069        #Assume no one will be wanting to read this, while we are writing
4070        #outfile.close()
4071       
4072    def store_timestep(self, quantity_slice):
4073        """
4074        quantity_slice is the data to be stored at this time step
4075        """
4076       
4077        outfile = self.outfile
4078       
4079        # Get the variables
4080        time = outfile.variables[time_name]
4081        quantity = outfile.variables[self.quantity_name]
4082           
4083        i = len(time)
4084
4085        #Store time
4086        time[i] = i*self.time_step #self.domain.time
4087        quantity[i,:] = quantity_slice*100 # To convert from m to cm
4088                                           # And m/s to cm/sec
4089       
4090    def close(self):
4091        self.outfile.close()
4092
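# Minimal sketch (file name and values assumed) of driving Write_nc
# directly: one 2x2 grid is stored per timestep. Note that store_timestep
# multiplies by 100, converting m to cm (and m/s to cm/sec).
#
#   from Numeric import array
#   nc = Write_nc('HA', 'ha_example.nc', time_step_count=2, time_step=60.0,
#                 lon=[148.0, 149.0], lat=[-36.0, -35.0])
#   for t in range(2):
#       nc.store_timestep(array([[0.1, 0.2], [0.3, 0.4]]))  # metres
#   nc.close()
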
4093def urs2sww(basename_in='o', basename_out=None, verbose=False,
4094            remove_nc_files=True,
4095            minlat=None, maxlat=None,
4096            minlon= None, maxlon=None,
4097            mint=None, maxt=None,
4098            mean_stage=0,
4099            origin = None,
4100            zscale=1,
4101            fail_on_NaN=True,
4102            NaN_filler=0,
4103            elevation=None):
    """
    Convert URS C binary format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename-z-mux, basefilename-e-mux and basefilename-n-mux containing
    relative height, x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is for a grid.

    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
    NOTE: minlon is the most easterly boundary.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing).
    It will be the origin of the sww file. This shouldn't be used,
    since all of anuga should be able to handle an arbitrary origin.


    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    which means that latitude is the fastest
    varying dimension (row major order, so to speak).

    In URS C binary the latitudes and longitudes are in ascending order.
    """
4133   
    if basename_out is None:
        basename_out = basename_in
    files_out = urs2nc(basename_in, basename_out)
    ferret2sww(basename_out,
               minlat=minlat,
               maxlat=maxlat,
               minlon=minlon,
               maxlon=maxlon,
               mint=mint,
               maxt=maxt,
               mean_stage=mean_stage,
               origin=origin,
               zscale=zscale,
               fail_on_NaN=fail_on_NaN,
               NaN_filler=NaN_filler,
               inverted_bathymetry=True,
               verbose=verbose)
4150    #print "files_out",files_out
4151    if remove_nc_files:
4152        for file_out in files_out:
4153            os.remove(file_out)
4154   
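# Sketch of the overall URS pipeline (basename and extents assumed): the
# files o-z-mux, o-e-mux and o-n-mux are converted to nc files and then
# to a single sww via ferret2sww.
#
#   urs2sww(basename_in='o', basename_out='urs_example',
#           minlat=-38, maxlat=-35, minlon=148, maxlon=151,
#           verbose=True)
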
4155def urs2nc(basename_in = 'o', basename_out = 'urs'):
    """
    Convert the 3 urs files to 4 nc files.

    The urs file names must be:
    [basename_in]-z-mux
    [basename_in]-e-mux
    [basename_in]-n-mux
    """
4165    files_in = [basename_in+'-z-mux',
4166                basename_in+'-e-mux',
4167                basename_in+'-n-mux']
4168    files_out = [basename_out+'_ha.nc',
4169                 basename_out+'_ua.nc',
4170                 basename_out+'_va.nc']
4171    quantities = ['HA','UA','VA']
4172
    for file_name in files_in:
        if not os.access(file_name, os.F_OK):
            msg = 'File %s does not exist or is not accessible' %file_name
            raise IOError, msg
4177       
    hashed_elevation = None
    for file_in, file_out, quantity in zip(files_in,
                                           files_out,
                                           quantities):
4182        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
4183                                         file_out,
4184                                         quantity)
        if hashed_elevation is None:
4186            elevation_file = basename_out+'_e.nc'
4187            write_elevation_nc(elevation_file,
4188                                lon,
4189                                lat,
4190                                depth)
4191            hashed_elevation = myhash(lonlatdep)
4192        else:
4193            msg = "The elevation information in the mux files is inconsistent"
4194            assert hashed_elevation == myhash(lonlatdep), msg
4195    files_out.append(elevation_file)
4196    return files_out
4197   
4198def _binary_c2nc(file_in, file_out, quantity):
4199    """
4200    Reads in a quantity urs file and writes a quantity nc file.
4201    additionally, returns the depth and lat, long info,
4202    so it can be written to a file.
4203    """
    columns = 3 # long, lat, depth
4205    mux_file = open(file_in, 'rb')
4206   
4207    # Number of points/stations
4208    (points_num,)= unpack('i',mux_file.read(4))
4209
4210    # nt, int - Number of time steps
4211    (time_step_count,)= unpack('i',mux_file.read(4))
4212
4213    #dt, float - time step, seconds
4214    (time_step,) = unpack('f', mux_file.read(4))
4215   
4216    msg = "Bad data in the mux file."
4217    if points_num < 0:
4218        mux_file.close()
4219        raise ANUGAError, msg
4220    if time_step_count < 0:
4221        mux_file.close()
4222        raise ANUGAError, msg
4223    if time_step < 0:
4224        mux_file.close()
4225        raise ANUGAError, msg
4226   
4227    lonlatdep = p_array.array('f')
4228    lonlatdep.read(mux_file, columns * points_num)
4229    lonlatdep = array(lonlatdep, typecode=Float)   
4230    lonlatdep = reshape(lonlatdep, (points_num, columns))
4231   
    lon, lat, depth = lon_lat2grid(lonlatdep)
    lon_sorted = list(lon)
    lon_sorted.sort()

    if list(lon) != lon_sorted:
        msg = "Longitudes in mux file are not in ascending order"
        raise IOError, msg

    lat_sorted = list(lat)
    lat_sorted.sort()

    if list(lat) != lat_sorted:
        msg = "Latitudes in mux file are not in ascending order"
        raise IOError, msg

4245    nc_file = Write_nc(quantity,
4246                       file_out,
4247                       time_step_count,
4248                       time_step,
4249                       lon,
4250                       lat)
4251
4252    for i in range(time_step_count):
4253        #Read in a time slice  from mux file 
4254        hz_p_array = p_array.array('f')
4255        hz_p_array.read(mux_file, points_num)
4256        hz_p = array(hz_p_array, typecode=Float)
4257        hz_p = reshape(hz_p, (len(lon), len(lat)))
4258        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
4259
4260        #write time slice to nc file
4261        nc_file.store_timestep(hz_p)
4262    mux_file.close()
4263    nc_file.close()
4264
4265    return lonlatdep, lon, lat, depth
4266   
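# Sketch (file name and values assumed) of the binary layout _binary_c2nc
# expects, handy for building a tiny test mux file: two ints (number of
# points, number of timesteps) and a float (timestep size in seconds),
# then a lon/lat/depth float triplet per point, then one float per point
# per timestep.
#
#   from struct import pack
#   f = open('test-z-mux', 'wb')
#   f.write(pack('iif', 4, 2, 60.0))         # 4 points, 2 steps, dt = 60s
#   for lon, lat, dep in [(148., -36., 100.), (148., -35., 90.),
#                         (149., -36., 80.), (149., -35., 70.)]:
#       f.write(pack('fff', lon, lat, dep))  # latitude varies fastest
#   for i in range(2*4):
#       f.write(pack('f', 0.0))              # the quantity time slices
#   f.close()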
4267
4268def write_elevation_nc(file_out, lon, lat, depth_vector):
4269    """
4270    Write an nc elevation file.
4271    """
4272   
4273    # NetCDF file definition
4274    outfile = NetCDFFile(file_out, 'w')
4275
4276    #Create new file
4277    nc_lon_lat_header(outfile, lon, lat)
4278   
4279    # ELEVATION
4280    zname = 'ELEVATION'
4281    outfile.createVariable(zname, precision, (lat_name, lon_name))
4282    outfile.variables[zname].units='CENTIMETERS'
4283    outfile.variables[zname].missing_value=-1.e+034
4284
4285    outfile.variables[lon_name][:]= ensure_numeric(lon)
4286    outfile.variables[lat_name][:]= ensure_numeric(lat)
4287
4288    depth = reshape(depth_vector, ( len(lat), len(lon)))
4289    outfile.variables[zname][:]= depth
4290   
4291    outfile.close()
4292   
4293def nc_lon_lat_header(outfile, lon, lat):
4294    """
4295    outfile is the netcdf file handle.
4296    lon - a list/array of the longitudes
4297    lat - a list/array of the latitudes
4298    """
4299   
4300    outfile.institution = 'Geoscience Australia'
4301    outfile.description = 'Converted from URS binary C'
4302   
4303    # Longitude
4304    outfile.createDimension(lon_name, len(lon))
4305    outfile.createVariable(lon_name, precision, (lon_name,))
4306    outfile.variables[lon_name].point_spacing='uneven'
4307    outfile.variables[lon_name].units='degrees_east'
4308    outfile.variables[lon_name].assignValue(lon)
4309
4310
4311    # Latitude
4312    outfile.createDimension(lat_name, len(lat))
4313    outfile.createVariable(lat_name, precision, (lat_name,))
4314    outfile.variables[lat_name].point_spacing='uneven'
4315    outfile.variables[lat_name].units='degrees_north'
4316    outfile.variables[lat_name].assignValue(lat)
4317
4318
4319   
4320def lon_lat2grid(long_lat_dep):
    """
    Given a list of points that are assumed to lie on a grid,
    return the longs and lats of the grid.
    long_lat_dep is an array where each row is a position.
    The first column is longitudes.
    The second column is latitudes.

    The latitude is the fastest varying dimension - as in mux files.
    """
4330    LONG = 0
4331    LAT = 1
4332    QUANTITY = 2
4333
4334    long_lat_dep = ensure_numeric(long_lat_dep, Float)
4335   
4336    num_points = long_lat_dep.shape[0]
4337    this_rows_long = long_lat_dep[0,LONG]
4338
    # Count the number of unique latitudes
    i = 0
    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
        i += 1
    # Determine the lats and longs from the grid
    lat = long_lat_dep[:i, LAT]
    long = long_lat_dep[::i, LONG]
4346   
4347    lenlong = len(long)
4348    lenlat = len(lat)
4349    #print 'len lat', lat, len(lat)
4350    #print 'len long', long, len(long)
4351         
4352    msg = 'Input data is not gridded'     
4353    assert num_points % lenlat == 0, msg
4354    assert num_points % lenlong == 0, msg
4355         
4356    # Test that data is gridded       
4357    for i in range(lenlong):
4358        msg = 'Data is not gridded.  It must be for this operation'
4359        first = i*lenlat
4360        last = first + lenlat
4361               
4362        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4363        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
4364   
4365   
4366#    print 'range long', min(long), max(long)
4367#    print 'range lat', min(lat), max(lat)
4368#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4369#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4370   
4371   
4372   
4373    msg = 'Out of range latitudes/longitudes'
    for l in lat: assert -90 < l < 90, msg
    for l in long: assert -180 < l < 180, msg
4376
4377    # Changing quantity from lat being the fastest varying dimension to
4378    # long being the fastest varying dimension
4379    # FIXME - make this faster/do this a better way
4380    # use numeric transpose, after reshaping the quantity vector
4381#    quantity = zeros(len(long_lat_dep), Float)
4382    quantity = zeros(num_points, Float)
4383   
4384#    print 'num',num_points
4385    for lat_i, _ in enumerate(lat):
4386        for long_i, _ in enumerate(long):
4387            q_index = lat_i*lenlong+long_i
4388            lld_index = long_i*lenlat+lat_i
4389#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4390            temp = long_lat_dep[lld_index, QUANTITY]
4391            quantity[q_index] = temp
4392           
4393    return long, lat, quantity
4394
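# Tiny worked example (values assumed) of lon_lat2grid: four rows with
# latitude varying fastest yield a 2-point long axis, a 2-point lat axis
# and a quantity vector reordered so that longitude varies fastest.
#
#   >>> from Numeric import array
#   >>> lld = array([[148., -36., 1.], [148., -35., 2.],
#   ...              [149., -36., 3.], [149., -35., 4.]])
#   >>> lon, lat, q = lon_lat2grid(lld)
#   >>> list(lon), list(lat), list(q)
#   ([148.0, 149.0], [-36.0, -35.0], [1.0, 3.0, 2.0, 4.0])
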
####  END URS 2 SWW  ###
4396#-------------------------------------------------------------
4397if __name__ == "__main__":
4398    pass
4399