source: anuga_core/source/anuga/shallow_water/data_manager.py @ 3694

Last change on this file since 3694 was 3694, checked in by duncan, 17 years ago

In ferret2sww set default of inverted_bathymetry to True

File size: 122.8 KB
[2852]1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
[3535]33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
[2852]34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
[3560]47TSH + Boundary SWW -> SWW: Simluation using abstract_2d_finite_volumes
[2852]48
49"""
50
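# Illustrative sketch of the dataflow described above (not part of the
# module proper).  The calls assume hypothetical input files 'topography.*'
# and 'runup.sww'; see the individual converters below for details:
#
#     from anuga.shallow_water.data_manager import dem2pts, sww2dem
#
#     dem2pts('topography')        # topography.dem -> topography.pts
#     sww2dem('runup', quantity='stage', cellsize=25,
#             format='asc')        # runup.sww -> ArcView asc grid + prj
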
import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import csv
import os

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around


from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data
from anuga.config import minimum_storable_height as default_minimum_storable_height

def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s

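# For example (illustrative):
#     make_filename(' Flood model (final) ')  ->  'Flood_model_final'
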
def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    # Use a bitwise OR of the access flags; 'os.R_OK and os.W_OK' would
    # collapse to just os.W_OK and only test writability.
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' % path

    return path


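# For example (illustrative; on a unix system with a hypothetical user):
#     check_dir('~/output')  ->  '/home/user/output/'  (created if absent)
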
def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" % X

        os.rmdir(path)


def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from anuga.config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' % size

    if time is not None:
        FN += '_time%.2f' % time

    FN += '.' + format
    return FN

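# For example (illustrative; assuming a unix path separator):
#     create_filename('data', 'run', 'sww', size=4, time=1.5)
#         ->  'data/run_size4_time1.50.sww'
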
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from anuga.config import data_dir

    dir = check_dir(datadir)

    # Note: check_dir already appends os.sep, and create_filename writes
    # '_size%d' (no '='), so this pattern may not match files produced above.
    pattern = dir + os.sep + filename + '_size=%d*.%s' % (size, format)
    return glob.glob(pattern)


#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], \
               "Mode %s must be either 'w' (write), 'r' (read) or 'a' (append)" % mode

        #Create filename
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))


        self.filename = create_filename(domain.get_datadir(),
            domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain


        #FIXME: Should we have a general set_precision function?


#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()

    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      % self.filename
                msg += ' - trying step %s again' % self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #change domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s' % self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename + filename_ext

            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure = \
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s' % file_size
                print '    saving file to %s' % next_data_structure.filename
            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    A = choose(A-z[:] >= self.minimum_storable_height,
                               (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()


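# Usage sketch (illustrative; 'domain' stands for a shallow water domain
# whose format attribute is 'sww'; get_dataobject is defined further below):
#
#     sww = get_dataobject(domain)     # resolves to Data_format_sww
#     sww.store_connectivity()         # mesh and bed elevation, stored once
#     sww.store_timestep(['stage', 'xmomentum', 'ymomentum'])  # per timestep
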
#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()

    def store_checkpoint(self):
        """Write x,y coordinates and connectivity of the triangles
        constituting the mesh.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #FIXME: 'z' was never fetched and the checkpoint file defines no
        #elevation variable, so storing Z here would raise a NameError:
        #z[:] = Z.astype(self.precision)

        volumes[:] = V

        #Close
        fid.close()

    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      % self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        stage = fid.variables[name]  #Look up the named quantity
                                     #(was hard-coded to 'stage')
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        stage[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()




#Function for storing xya output
#FIXME Not done yet for this version
#This is obsolete.  Use geo_spatial_data instead
class Data_format_xya(Data_format):
    """Generic interface to data formats
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' % (x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momenta) to file
        """
        pass

#### NEED national exposure database

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self, file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.


        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string att's.

        So just use geospatial to hold the x, y and georef? Bad, since
        different att's are in different structures.  Not so bad, the info
        to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.


        """
        self._file_name = file_name
        self._geospatial = None #

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
            self._load_exposure_csv(self._file_name)
        try:
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if is_x_y_locations is not False:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                 longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #FIXME throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result != 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result != 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic, \
                    other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result != 0:
                    return result
            return 0
        else:
            return 1

    def _load_exposure_csv(self, file_name):
        """
        Load in the csv as a dic, title as key and column info as value.
        Also, create a dic, title as key and column index as value.
        """
        #
        attribute_dic = {}
        title_index_dic = {}
        titles_stripped = [] # list of titles

        reader = csv.reader(file(file_name))

        # Read in and manipulate the title info
        titles = reader.next()
        for i, title in enumerate(titles):
            titles_stripped.append(title.strip())
            title_index_dic[title.strip()] = i
        title_count = len(titles_stripped)
        #print "title_index_dic",title_index_dic


        #create a dic of column values, indexed by column title
        for line in reader:
            if len(line) != title_count:
                raise IOError #FIXME make this nicer
            for i, value in enumerate(line):
                attribute_dic.setdefault(titles_stripped[i],[]).append(value)

        return attribute_dic, title_index_dic

    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' % column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column, using the known_values,
        to return a value of the value_column_name.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) != value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' % column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name, 'wb')
        writer = csv.writer(fd)

        #Write the title to a csv file
        line = [None]* len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]] = title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]* len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]] = \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

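# Usage sketch (illustrative; 'exposure.csv' and the 'DEPTH' column are
# hypothetical, but the file is assumed to carry LATITUDE/LONGITUDE titles):
#
#     exposure = Exposure_csv('exposure.csv')
#     locations = exposure.get_location()        # Geospatial_data or None
#     depths = exposure.get_column('DEPTH')      # values come back as strings
#     exposure.set_column('DEPTH_m', [float(d) for d in depths])
#     exposure.save('exposure_with_depth.csv')
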
#Auxiliary
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N == 3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))

    outfile.close()

#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN, xx, yy, zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' % t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN, xx, yy, zz)

def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float, tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry', x, y, z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(data_dir+os.sep+filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Hmmmm'



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float, tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' % t, x, y, z)

def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' % (j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

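# For example (illustrative; file names are hypothetical), keeping every
# fifth of the first 100 timesteps:
#     filter_netcdf('big.sww', 'small.sww', first=0, last=100, step=5)
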
#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' % domain.format)
    return cls(domain, mode)

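# For example (illustrative): a domain whose format attribute is 'sww'
# resolves to the Data_format_sww class above, so get_dataobject(domain)
# returns Data_format_sww(domain, 'w').
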
#FIXME move into geospatial.  There should only be one method that
# reads xya, and writes pts.
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get NetCDF
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' % (root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' % (i, line)

        try:
            x = float(fields[0])
            y = float(fields[1])
            z = float(fields[2])
        except:
            continue

        points.append([x, y])

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' % ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)


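# For example (illustrative; 'bathymetry.xya' is a hypothetical input file),
# reading every second line and negating depths into elevations:
#     xya2pts('bathymetry', stride=2, z_func=lambda z: -z)
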
######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from anuga.coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()

def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False,):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

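# For example (illustrative; 'topography.dem' is a hypothetical input file),
# clipping to an easting window and caching the expensive conversion:
#     dem2pts('topography', easting_min=722000, easting_max=725000,
#             use_cache=True, verbose=True)
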
def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' % (root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' % ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                          'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose:
        print 'There are %d values in the elevation' % totalnopoints
        print 'There are %d values in the clipped elevation' % clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' % nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0, thisi+1, 1):
        if verbose and i % ((nrows+10)/10) == 0:
            print 'Processing row %d of %d' % (i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0, index2+1, 1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min, y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()


def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])



1587def hecras_cross_sections2pts(basename_in,
1588                              basename_out=None,
1589                              verbose=False):
1590    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1591
1592    Example:
1593
1594
1595# RAS export file created on Mon 15Aug2005 11:42
1596# by HEC-RAS Version 3.1.1
1597
1598BEGIN HEADER:
1599  UNITS: METRIC
1600  DTM TYPE: TIN
1601  DTM: v:\1\cit\perth_topo\river_tin
1602  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1603  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1604  MAP PROJECTION: UTM
1605  PROJECTION ZONE: 50
1606  DATUM: AGD66
1607  VERTICAL DATUM:
1608  NUMBER OF REACHES:  19
1609  NUMBER OF CROSS-SECTIONS:  14206
1610END HEADER:
1611
1612
1613Only the SURFACE LINE data of the following form will be utilised
1614
1615  CROSS-SECTION:
1616    STREAM ID:Southern-Wungong
1617    REACH ID:Southern-Wungong
1618    STATION:19040.*
1619    CUT LINE:
1620      405548.671603161 , 6438142.7594925
1621      405734.536092045 , 6438326.10404912
1622      405745.130459356 , 6438331.48627354
1623      405813.89633823 , 6438368.6272789
1624    SURFACE LINE:
1625     405548.67,   6438142.76,   35.37
1626     405552.24,   6438146.28,   35.41
1627     405554.78,   6438148.78,   35.44
1628     405555.80,   6438149.79,   35.44
1629     405559.37,   6438153.31,   35.45
1630     405560.88,   6438154.81,   35.44
1631     405562.93,   6438156.83,   35.42
1632     405566.50,   6438160.35,   35.38
1633     405566.99,   6438160.83,   35.37
1634     ...
1635   END CROSS-SECTION
1636
1637    Convert to NetCDF pts format which is
1638
1639    points:  (Nx2) Float array
1640    elevation: N Float array
1641    """
1642
1643    #FIXME: Can this be written feasibly using write_pts?
1644
1645    import os
1646    from Scientific.IO.NetCDF import NetCDFFile
1647    from Numeric import Float, zeros, reshape
1648
1649    root = basename_in
1650
1651    #Get ASCII file
1652    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1653
1654    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1655
1656    lines = infile.readlines()
1657    infile.close()
1658
1659    if verbose: print 'Converting to pts format'
1660
1661    i = 0
1662    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1663        i += 1
1664
1665    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1666    i += 1
1667
1668    assert lines[i].strip().upper().startswith('UNITS:')
1669    units = lines[i].strip().split()[1]
1670    i += 1
1671
1672    assert lines[i].strip().upper().startswith('DTM TYPE:')
1673    i += 1
1674
1675    assert lines[i].strip().upper().startswith('DTM:')
1676    i += 1
1677
1678    assert lines[i].strip().upper().startswith('STREAM')
1679    i += 1
1680
1681    assert lines[i].strip().upper().startswith('CROSS')
1682    i += 1
1683
1684    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1685    projection = lines[i].strip().split(':')[1]
1686    i += 1
1687
1688    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1689    zone = int(lines[i].strip().split(':')[1])
1690    i += 1
1691
1692    assert lines[i].strip().upper().startswith('DATUM:')
1693    datum = lines[i].strip().split(':')[1]
1694    i += 1
1695
1696    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1697    i += 1
1698
1699    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1700    i += 1
1701
1702    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1703    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1704    i += 1
1705
1706
1707    #Now read all points
1708    points = []
1709    elevation = []
1710    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1711        for entry in entries:
1712            points.append(entry[:2])
1713            elevation.append(entry[2])
1714
1715
1716    msg = 'Actual number of cross sections %d does not match reported number %d'\
1717          %(j+1, number_of_cross_sections)
1718    assert j+1 == number_of_cross_sections, msg
1719
1720    #Get output file
1721    if basename_out is None:
1722        ptsname = root + '.pts'
1723    else:
1724        ptsname = basename_out + '.pts'
1725
1726    #FIXME (DSG-ON): use loadASCII export_points_file
1727    if verbose: print 'Store to NetCDF file %s' %ptsname
1728    # NetCDF file definition
1729    outfile = NetCDFFile(ptsname, 'w')
1730
1731    #Create new file
1732    outfile.institution = 'Geoscience Australia'
1733    outfile.description = 'NetCDF pts format for compact and portable ' +\
1734                          'storage of spatial point data derived from HEC-RAS'
1735
1736    #Georeferencing
1737    outfile.zone = zone
1738    outfile.xllcorner = 0.0
1739    outfile.yllcorner = 0.0
1740    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1741    outfile.false_northing = 1000000
1742
1743    outfile.projection = projection
1744    outfile.datum = datum
1745    outfile.units = units
1746
1747
1748    outfile.createDimension('number_of_points', len(points))
1749    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1750
1751    # variable definitions
1752    outfile.createVariable('points', Float, ('number_of_points',
1753                                             'number_of_dimensions'))
1754    outfile.createVariable('elevation', Float, ('number_of_points',))
1755
1756    # Get handles to the variables
1757    outfile.variables['points'][:] = points
1758    outfile.variables['elevation'][:] = elevation
1759
1760    outfile.close()
1761
1762
1763
1764def sww2dem(basename_in, basename_out = None,
1765            quantity = None,
1766            timestep = None,
1767            reduction = None,
1768            cellsize = 10,
1769            NODATA_value = -9999,
1770            easting_min = None,
1771            easting_max = None,
1772            northing_min = None,
1773            northing_max = None,
1774            verbose = False,
1775            origin = None,
1776            datum = 'WGS84',
1777            format = 'ers'):
1778
1779    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1780
1781    Example (ASC):
1782
1783    ncols         3121
1784    nrows         1800
1785    xllcorner     722000
1786    yllcorner     5893000
1787    cellsize      25
1788    NODATA_value  -9999
1789    138.3698 137.4194 136.5062 135.5558 ..........
1790
1791    Also write an accompanying file with the same basename_out but extension .prj
1792    used to fix the UTM zone, datum, false northings and eastings.
1793
1794    The prj format is assumed to be as
1795
1796    Projection    UTM
1797    Zone          56
1798    Datum         WGS84
1799    Zunits        NO
1800    Units         METERS
1801    Spheroid      WGS84
1802    Xshift        0.0000000000
1803    Yshift        10000000.0000000000
1804    Parameters
1805
1806
1807    The parameter quantity must be the name of an existing quantity or
1808    an expression involving existing quantities. The default is
1809    'elevation'.
1810
1811    if timestep (an index) is given, output quantity at that timestep
1812
1813    if reduction is given use that to reduce quantity over all timesteps.
1814
1815    datum specifies the datum for the output georeferencing (default 'WGS84')
1816
1817    format can be either 'asc' or 'ers'
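
    Example usage (hypothetical filenames; a sketch only):

        sww2dem('runup', basename_out='runup_depth',
                quantity='stage - elevation',
                reduction=max,
                cellsize=25,
                format='asc')

    would compute the maximum water depth over all timesteps from
    runup.sww and write it as an ArcView ASCII grid to runup_depth.asc
    (with an accompanying runup_depth.prj).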
1818    """
1819
1820    import sys
1821    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1822    from Numeric import array2string
1823
[3514]1824    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
[3560]1825    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
[2852]1826
1827    msg = 'Format must be either asc or ers'
1828    assert format.lower() in ['asc', 'ers'], msg
1829
1830
1831    false_easting = 500000
1832    false_northing = 10000000
1833
1834    if quantity is None:
1835        quantity = 'elevation'
1836
1837    if reduction is None:
1838        reduction = max
1839
1840    if basename_out is None:
1841        basename_out = basename_in + '_%s' %quantity
1842
1843    swwfile = basename_in + '.sww'
1844    demfile = basename_out + '.' + format
1845    # Note the use of a .ers extension is optional (write_ermapper_grid will
1846    # deal with either option)
1847
1848    #Read sww file
1849    if verbose: print 'Reading from %s' %swwfile
1850    from Scientific.IO.NetCDF import NetCDFFile
1851    fid = NetCDFFile(swwfile)
1852
1853    #Get extent and reference
1854    x = fid.variables['x'][:]
1855    y = fid.variables['y'][:]
1856    volumes = fid.variables['volumes'][:]
1857
1858    number_of_timesteps = fid.dimensions['number_of_timesteps']
1859    number_of_points = fid.dimensions['number_of_points']
1860    if origin is None:
1861
1862        #Get geo_reference
1863        #sww files don't have to have a geo_ref
1864        try:
1865            geo_reference = Geo_reference(NetCDFObject=fid)
1866        except AttributeError, e:
1867            geo_reference = Geo_reference() #Default georef object
1868
1869        xllcorner = geo_reference.get_xllcorner()
1870        yllcorner = geo_reference.get_yllcorner()
1871        zone = geo_reference.get_zone()
1872    else:
1873        zone = origin[0]
1874        xllcorner = origin[1]
1875        yllcorner = origin[2]
1876
1877
1878
1879    #FIXME: Refactor using code from file_function.statistics
1880    #Something like print swwstats(swwname)
1881    if verbose:
1882        x = fid.variables['x'][:]
1883        y = fid.variables['y'][:]
1884        times = fid.variables['time'][:]
1885        print '------------------------------------------------'
1886        print 'Statistics of SWW file:'
1887        print '  Name: %s' %swwfile
1888        print '  Reference:'
1889        print '    Lower left corner: [%f, %f]'\
1890              %(xllcorner, yllcorner)
1891        print '    Start time: %f' %fid.starttime[0]
1892        print '  Extent:'
1893        print '    x [m] in [%f, %f], len(x) == %d'\
1894              %(min(x.flat), max(x.flat), len(x.flat))
1895        print '    y [m] in [%f, %f], len(y) == %d'\
1896              %(min(y.flat), max(y.flat), len(y.flat))
1897        print '    t [s] in [%f, %f], len(t) == %d'\
1898              %(min(times), max(times), len(times))
1899        print '  Quantities [SI units]:'
1900        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
1901            q = fid.variables[name][:].flat
1902            print '    %s in [%f, %f]' %(name, min(q), max(q))
1903
1904
1905
1906
1907
[2891]1908    # Get quantity and reduce if applicable
[2852]1909    if verbose: print 'Processing quantity %s' %quantity
1910
[2891]1911    # Turn NetCDF objects into Numeric arrays
[2852]1912    quantity_dict = {}
1913    for name in fid.variables.keys():
1914        quantity_dict[name] = fid.variables[name][:]
1915
1916
[2891]1917
1918    # Convert quantity expression to quantities found in sww file   
[2852]1919    q = apply_expression_to_dictionary(quantity, quantity_dict)
1920
1921
1922
1923    if len(q.shape) == 2:
1924        #q has a time component and needs to be reduced along
1925        #the temporal dimension
1926        if verbose: print 'Reducing quantity %s' %quantity
1927        q_reduced = zeros( number_of_points, Float )
1928
1929        for k in range(number_of_points):
1930            q_reduced[k] = reduction( q[:,k] )
1931
1932        q = q_reduced
1933
1934    #Post condition: Now q has dimension: number_of_points
1935    assert len(q.shape) == 1
1936    assert q.shape[0] == number_of_points
1937
1938
1939    if verbose:
1940        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1941
1942
1943    #Create grid and update xll/yll corner and x,y
1944
1945    #Relative extent
1946    if easting_min is None:
1947        xmin = min(x)
1948    else:
1949        xmin = easting_min - xllcorner
1950
1951    if easting_max is None:
1952        xmax = max(x)
1953    else:
1954        xmax = easting_max - xllcorner
1955
1956    if northing_min is None:
1957        ymin = min(y)
1958    else:
1959        ymin = northing_min - yllcorner
1960
1961    if northing_max is None:
1962        ymax = max(y)
1963    else:
1964        ymax = northing_max - yllcorner
1965
1966
1967
1968    if verbose: print 'Creating grid'
1969    ncols = int((xmax-xmin)/cellsize)+1
1970    nrows = int((ymax-ymin)/cellsize)+1
1971
1972
1973    #New absolute reference and coordinates
1974    newxllcorner = xmin+xllcorner
1975    newyllcorner = ymin+yllcorner
1976
1977    x = x+xllcorner-newxllcorner
1978    y = y+yllcorner-newyllcorner
1979
1980    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1981    assert len(vertex_points.shape) == 2
1982
1983
1984
1985    grid_points = zeros ( (ncols*nrows, 2), Float )
1986
1987
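    #Generate grid point coordinates row by row. For 'ers' output the y
    #values run from north to south because ER Mapper grids store the
    #first row at the top; for 'asc' the rows are flipped later when the
    #file is written.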
1988    for i in xrange(nrows):
1989        if format.lower() == 'asc':
1990            yg = i*cellsize
1991        else:
1992            #This will flip the order of the y values for ers
1993            yg = (nrows-i)*cellsize
1994
1995        for j in xrange(ncols):
1996            xg = j*cellsize
1997            k = i*ncols + j
1998
1999            grid_points[k,0] = xg
2000            grid_points[k,1] = yg
2001
2002    #Interpolate
2003    #from least_squares import Interpolation
[3514]2004    from anuga.fit_interpolate.interpolate import Interpolate
[2852]2005
2006
2007    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2008
2009
2010
2011    #Interpolate using quantity values
2012    if verbose: print 'Interpolating'
2013    grid_values = interp.interpolate(q, grid_points).flat
2014
2015
2016    if verbose:
2017        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2018                                                      max(grid_values))
2019
2020    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2021    P = interp.mesh.get_boundary_polygon()
2022    outside_indices = outside_polygon(grid_points, P, closed=True)
2023
2024    for i in outside_indices:
2025        grid_values[i] = NODATA_value
2026
2027
2028
2029
2030    if format.lower() == 'ers':
2031        # setup ERS header information
2032        grid_values = reshape(grid_values,(nrows, ncols))
2033        header = {}
2034        header['datum'] = '"' + datum + '"'
2035        # FIXME The use of hardwired UTM and zone number needs to be made optional
2036        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2037        header['projection'] = '"UTM-' + str(zone) + '"'
2038        header['coordinatetype'] = 'EN'
2039        if header['coordinatetype'] == 'LL':
2040            header['longitude'] = str(newxllcorner)
2041            header['latitude'] = str(newyllcorner)
2042        elif header['coordinatetype'] == 'EN':
2043            header['eastings'] = str(newxllcorner)
2044            header['northings'] = str(newyllcorner)
2045        header['nullcellvalue'] = str(NODATA_value)
2046        header['xdimension'] = str(cellsize)
2047        header['ydimension'] = str(cellsize)
2048        header['value'] = '"' + quantity + '"'
2049        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2050
2051
2052        #Write
2053        if verbose: print 'Writing %s' %demfile
2054        import ermapper_grids
2055        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2056
2057        fid.close()
2058    else:
2059        #Write to Ascii format
2060
2061        #Write prj file
2062        prjfile = basename_out + '.prj'
2063
2064        if verbose: print 'Writing %s' %prjfile
2065        prjid = open(prjfile, 'w')
2066        prjid.write('Projection    %s\n' %'UTM')
2067        prjid.write('Zone          %d\n' %zone)
2068        prjid.write('Datum         %s\n' %datum)
2069        prjid.write('Zunits        NO\n')
2070        prjid.write('Units         METERS\n')
2071        prjid.write('Spheroid      %s\n' %datum)
2072        prjid.write('Xshift        %d\n' %false_easting)
2073        prjid.write('Yshift        %d\n' %false_northing)
2074        prjid.write('Parameters\n')
2075        prjid.close()
2076
2077
2078
2079        if verbose: print 'Writing %s' %demfile
2080
2081        ascid = open(demfile, 'w')
2082
2083        ascid.write('ncols         %d\n' %ncols)
2084        ascid.write('nrows         %d\n' %nrows)
2085        ascid.write('xllcorner     %d\n' %newxllcorner)
2086        ascid.write('yllcorner     %d\n' %newyllcorner)
2087        ascid.write('cellsize      %f\n' %cellsize)
2088        ascid.write('NODATA_value  %d\n' %NODATA_value)
2089
2090
2091        #Get bounding polygon from mesh
2092        #P = interp.mesh.get_boundary_polygon()
2093        #inside_indices = inside_polygon(grid_points, P)
2094
2095        for i in range(nrows):
2096            if verbose and i%((nrows+10)/10)==0:
2097                print 'Doing row %d of %d' %(i, nrows)
2098
2099            base_index = (nrows-i-1)*ncols
2100
2101            slice = grid_values[base_index:base_index+ncols]
2102            s = array2string(slice, max_line_width=sys.maxint)
2103            ascid.write(s[1:-1] + '\n')
2104
2105
2106            #print
2107            #for j in range(ncols):
2108            #    index = base_index+j#
2109            #    print grid_values[index],
2110            #    ascid.write('%f ' %grid_values[index])
2111            #ascid.write('\n')
2112
2113        #Close
2114        ascid.close()
2115        fid.close()
2116
2117#Backwards compatibility
2118def sww2asc(basename_in, basename_out = None,
2119            quantity = None,
2120            timestep = None,
2121            reduction = None,
2122            cellsize = 10,
2123            verbose = False,
2124            origin = None):
2125    print 'sww2asc will soon be deprecated - please use sww2dem'
2126    sww2dem(basename_in,
2127            basename_out = basename_out,
2128            quantity = quantity,
2129            timestep = timestep,
2130            reduction = reduction,
2131            cellsize = cellsize,
2132            verbose = verbose,
2133            origin = origin,
2134            datum = 'WGS84',
2135            format = 'asc')
2136
2137def sww2ers(basename_in, basename_out = None,
2138            quantity = None,
2139            timestep = None,
2140            reduction = None,
2141            cellsize = 10,
2142            verbose = False,
2143            origin = None,
2144            datum = 'WGS84'):
2145    print 'sww2ers will soon be deprecated - please use sww2dem'
2146    sww2dem(basename_in,
2147            basename_out = basename_out,
2148            quantity = quantity,
2149            timestep = timestep,
2150            reduction = reduction,
2151            cellsize = cellsize,
2152            verbose = verbose,
2153            origin = origin,
[2891]2154            datum = datum,
2155            format = 'ers')
[2852]2156################################# END COMPATIBILITY ##############
2157
2158
2159
[2891]2160def sww2pts(basename_in, basename_out=None,
2161            data_points=None,
2162            quantity=None,
2163            timestep=None,
2164            reduction=None,
2165            NODATA_value=-9999,
2166            verbose=False,
2167            origin=None):
2168            #datum = 'WGS84')
2169
2170
2171    """Read SWW file and convert to interpolated values at selected points
2172
2173    The parameter 'quantity' must be the name of an existing quantity or
2174    an expression involving existing quantities. The default is
2175    'elevation'.
2176
2177    if timestep (an index) is given, output quantity at that timestep
2178
2179    if reduction is given use that to reduce quantity over all timesteps.
2180
2181    data_points (Nx2 array) gives the locations of the points where the quantity is to be computed.
2182   
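    Example usage (hypothetical values; a sketch only):

        from Numeric import array
        sww2pts('runup',
                data_points=array([[306000.0, 6193000.0],
                                   [306500.0, 6193500.0]]),
                quantity='elevation')

    would interpolate the elevation at the two given locations (assumed
    to be in the sww file's coordinate frame) and store the result in
    runup_elevation.pts.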
2183    """
2184
2185    import sys
2186    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2187    from Numeric import array2string
2188
[3514]2189    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
[3560]2190    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
[2891]2191
[3514]2192    from anuga.geospatial_data.geospatial_data import Geospatial_data
[2891]2193
2194    if quantity is None:
2195        quantity = 'elevation'
2196
2197    if reduction is None:
2198        reduction = max
2199
2200    if basename_out is None:
2201        basename_out = basename_in + '_%s' %quantity
2202
2203    swwfile = basename_in + '.sww'
2204    ptsfile = basename_out + '.pts'
2205
2206    # Read sww file
2207    if verbose: print 'Reading from %s' %swwfile
2208    from Scientific.IO.NetCDF import NetCDFFile
2209    fid = NetCDFFile(swwfile)
2210
2211    # Get extent and reference
2212    x = fid.variables['x'][:]
2213    y = fid.variables['y'][:]
2214    volumes = fid.variables['volumes'][:]
2215
2216    number_of_timesteps = fid.dimensions['number_of_timesteps']
2217    number_of_points = fid.dimensions['number_of_points']
2218    if origin is None:
2219
2220        # Get geo_reference
2221        # sww files don't have to have a geo_ref
2222        try:
2223            geo_reference = Geo_reference(NetCDFObject=fid)
2224        except AttributeError, e:
2225            geo_reference = Geo_reference() #Default georef object
2226
2227        xllcorner = geo_reference.get_xllcorner()
2228        yllcorner = geo_reference.get_yllcorner()
2229        zone = geo_reference.get_zone()
2230    else:
2231        zone = origin[0]
2232        xllcorner = origin[1]
2233        yllcorner = origin[2]
2234
2235
2236
2237    # FIXME: Refactor using code from file_function.statistics
2238    # Something like print swwstats(swwname)
2239    if verbose:
2240        x = fid.variables['x'][:]
2241        y = fid.variables['y'][:]
2242        times = fid.variables['time'][:]
2243        print '------------------------------------------------'
2244        print 'Statistics of SWW file:'
2245        print '  Name: %s' %swwfile
2246        print '  Reference:'
2247        print '    Lower left corner: [%f, %f]'\
2248              %(xllcorner, yllcorner)
2249        print '    Start time: %f' %fid.starttime[0]
2250        print '  Extent:'
2251        print '    x [m] in [%f, %f], len(x) == %d'\
2252              %(min(x.flat), max(x.flat), len(x.flat))
2253        print '    y [m] in [%f, %f], len(y) == %d'\
2254              %(min(y.flat), max(y.flat), len(y.flat))
2255        print '    t [s] in [%f, %f], len(t) == %d'\
2256              %(min(times), max(times), len(times))
2257        print '  Quantities [SI units]:'
2258        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2259            q = fid.variables[name][:].flat
2260            print '    %s in [%f, %f]' %(name, min(q), max(q))
2261
2262
2263
2264    # Get quantity and reduce if applicable
2265    if verbose: print 'Processing quantity %s' %quantity
2266
2267    # Turn NetCDF objects into Numeric arrays
2268    quantity_dict = {}
2269    for name in fid.variables.keys():
2270        quantity_dict[name] = fid.variables[name][:]
2271
2272
2273
2274    # Convert quantity expression to quantities found in sww file   
2275    q = apply_expression_to_dictionary(quantity, quantity_dict)
2276
2277
2278
2279    if len(q.shape) == 2:
2280        # q has a time component and needs to be reduced along
2281        # the temporal dimension
2282        if verbose: print 'Reducing quantity %s' %quantity
2283        q_reduced = zeros( number_of_points, Float )
2284
2285        for k in range(number_of_points):
2286            q_reduced[k] = reduction( q[:,k] )
2287
2288        q = q_reduced
2289
2290    # Post condition: Now q has dimension: number_of_points
2291    assert len(q.shape) == 1
2292    assert q.shape[0] == number_of_points
2293
2294
2295    if verbose:
2296        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2297
2298
2299    # Create grid and update xll/yll corner and x,y
2300    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2301    assert len(vertex_points.shape) == 2
2302
2303    # Interpolate
[3514]2304    from anuga.fit_interpolate.interpolate import Interpolate
[2891]2305    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2306
2307    # Interpolate using quantity values
2308    if verbose: print 'Interpolating'
2309    interpolated_values = interp.interpolate(q, data_points).flat
2310
2311
2312    if verbose:
2313        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2314                                                      max(interpolated_values))
2315
2316    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2317    P = interp.mesh.get_boundary_polygon()
2318    outside_indices = outside_polygon(data_points, P, closed=True)
2319
2320    for i in outside_indices:
2321        interpolated_values[i] = NODATA_value
2322
2323
2324    # Store results   
2325    G = Geospatial_data(data_points=data_points,
2326                        attributes=interpolated_values)
2327
2328    G.export_points_file(ptsfile, absolute = True)
2329
[2931]2330    fid.close()
[2891]2331
2332
[2852]2333def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2334                                  use_cache = False,
2335                                  verbose = False):
2336    """Read Digitial Elevation model from the following ASCII format (.asc)
2337
2338    Example:
2339
2340    ncols         3121
2341    nrows         1800
2342    xllcorner     722000
2343    yllcorner     5893000
2344    cellsize      25
2345    NODATA_value  -9999
2346    138.3698 137.4194 136.5062 135.5558 ..........
2347
2348    Convert basename_in + '.asc' to NetCDF format (.dem)
2349    mimicking the ASCII format closely.
2350
2351
2352    An accompanying file with same basename_in but extension .prj must exist
2353    and is used to fix the UTM zone, datum, false northings and eastings.
2354
2355    The prj format is assumed to be as
2356
2357    Projection    UTM
2358    Zone          56
2359    Datum         WGS84
2360    Zunits        NO
2361    Units         METERS
2362    Spheroid      WGS84
2363    Xshift        0.0000000000
2364    Yshift        10000000.0000000000
2365    Parameters
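
    Example usage (hypothetical basename; a sketch only):

        convert_dem_from_ascii2netcdf('perth_dem', use_cache=True)

    would read perth_dem.asc and perth_dem.prj and write perth_dem.dem.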
2366    """
2367
2368
2369
2370    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2371
2372    if use_cache is True:
2373        from caching import cache
2374        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2375                       dependencies = [basename_in + '.asc'],
2376                       verbose = verbose)
2377
2378    else:
2379        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2380
2381    return result
2382
2383
2384
2385
2386
2387
2388def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2389                                  verbose = False):
2390    """Read Digitial Elevation model from the following ASCII format (.asc)
2391
2392    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2393    """
2394
2395    import os
2396    from Scientific.IO.NetCDF import NetCDFFile
2397    from Numeric import Float, array
2398
2399    #root, ext = os.path.splitext(basename_in)
2400    root = basename_in
2401
2402    ###########################################
2403    # Read Meta data
2404    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2405    metadatafile = open(root + '.prj')
2406    metalines = metadatafile.readlines()
2407    metadatafile.close()
2408
2409    L = metalines[0].strip().split()
2410    assert L[0].strip().lower() == 'projection'
2411    projection = L[1].strip()                   #TEXT
2412
2413    L = metalines[1].strip().split()
2414    assert L[0].strip().lower() == 'zone'
2415    zone = int(L[1].strip())
2416
2417    L = metalines[2].strip().split()
2418    assert L[0].strip().lower() == 'datum'
2419    datum = L[1].strip()                        #TEXT
2420
2421    L = metalines[3].strip().split()
2422    assert L[0].strip().lower() == 'zunits'     #IGNORE
2423    zunits = L[1].strip()                       #TEXT
2424
2425    L = metalines[4].strip().split()
2426    assert L[0].strip().lower() == 'units'
2427    units = L[1].strip()                        #TEXT
2428
2429    L = metalines[5].strip().split()
2430    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2431    spheroid = L[1].strip()                     #TEXT
2432
2433    L = metalines[6].strip().split()
2434    assert L[0].strip().lower() == 'xshift'
2435    false_easting = float(L[1].strip())
2436
2437    L = metalines[7].strip().split()
2438    assert L[0].strip().lower() == 'yshift'
2439    false_northing = float(L[1].strip())
2440
2441    #print false_easting, false_northing, zone, datum
2442
2443
2444    ###########################################
2445    #Read DEM data
2446
2447    datafile = open(basename_in + '.asc')
2448
2449    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2450    lines = datafile.readlines()
2451    datafile.close()
2452
2453    if verbose: print 'Got %d lines' %len(lines)
2454
2455    ncols = int(lines[0].split()[1].strip())
2456    nrows = int(lines[1].split()[1].strip())
2457    xllcorner = float(lines[2].split()[1].strip())
2458    yllcorner = float(lines[3].split()[1].strip())
2459    cellsize = float(lines[4].split()[1].strip())
2460    NODATA_value = int(lines[5].split()[1].strip())
2461
2462    assert len(lines) == nrows + 6
2463
2464
2465    ##########################################
2466
2467
2468    if basename_out is None:
2469        netcdfname = root + '.dem'
2470    else:
2471        netcdfname = basename_out + '.dem'
2472
2473    if verbose: print 'Store to NetCDF file %s' %netcdfname
2474    # NetCDF file definition
2475    fid = NetCDFFile(netcdfname, 'w')
2476
2477    #Create new file
2478    fid.institution = 'Geoscience Australia'
2479    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2480                      'of spatial point data'
2481
2482    fid.ncols = ncols
2483    fid.nrows = nrows
2484    fid.xllcorner = xllcorner
2485    fid.yllcorner = yllcorner
2486    fid.cellsize = cellsize
2487    fid.NODATA_value = NODATA_value
2488
2489    fid.zone = zone
2490    fid.false_easting = false_easting
2491    fid.false_northing = false_northing
2492    fid.projection = projection
2493    fid.datum = datum
2494    fid.units = units
2495
2496
2497    # dimension definitions
2498    fid.createDimension('number_of_rows', nrows)
2499    fid.createDimension('number_of_columns', ncols)
2500
2501    # variable definitions
2502    fid.createVariable('elevation', Float, ('number_of_rows',
2503                                            'number_of_columns'))
2504
2505    # Get handles to the variables
2506    elevation = fid.variables['elevation']
2507
2508    #Store data
2509    n = len(lines[6:])
2510    for i, line in enumerate(lines[6:]):
2511        fields = line.split()
2512        if verbose and i%((n+10)/10)==0:
2513            print 'Processing row %d of %d' %(i, nrows)
2514
2515        elevation[i, :] = array([float(x) for x in fields])
2516
2517    fid.close()
2518
2519
2520
2521
2522
2523def ferret2sww(basename_in, basename_out = None,
2524               verbose = False,
2525               minlat = None, maxlat = None,
2526               minlon = None, maxlon = None,
2527               mint = None, maxt = None, mean_stage = 0,
2528               origin = None, zscale = 1,
2529               fail_on_NaN = True,
2530               NaN_filler = 0,
2531               elevation = None,
[3694]2532               inverted_bathymetry = True
[2852]2533               ): #FIXME: Bathymetry should be obtained
2534                                  #from MOST somehow.
2535                                  #Alternatively from elsewhere
2536                                  #or, as a last resort,
2537                                  #specified here.
2538                                  #The value of -100 will work
2539                                  #for the Wollongong tsunami
2540                                  #scenario but is very hacky
2541    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
[3560]2542    sww format native to abstract_2d_finite_volumes.
[2852]2543
2544    Specify only basename_in and read files of the form
2545    basename_in + '_ha.nc', '_ua.nc', '_va.nc' and '_e.nc' containing
2546    relative height, x-velocity, y-velocity and elevation, respectively.
2547
2548    Also convert latitude and longitude to UTM. All coordinates are
2549    assumed to be given in the GDA94 datum.
2550
2551    min's and max's: If omitted, the full extent is used.
2552    To include a value min may equal it, while max must exceed it.
2553    Lat and lon are assumed to be in decimal degrees.
2554
2555    origin is a 3-tuple with geo referenced
2556    UTM coordinates (zone, easting, northing)
2557
2558    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2559    which means that longitude is the fastest
2560    varying dimension (row major order, so to speak)
2561
2562    ferret2sww uses grid points as vertices in a triangular grid
2563    counting vertices from lower left corner upwards, then right
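
    Example usage (hypothetical basename; a sketch only):

        ferret2sww('most_output',
                   minlat=-21.0, maxlat=-20.0,
                   minlon=114.0, maxlon=115.0,
                   mean_stage=0.0,
                   verbose=True)

    would read most_output_ha.nc, most_output_ua.nc, most_output_va.nc
    and most_output_e.nc, crop to the given lat/lon window and write
    most_output.sww.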
2564    """
2565
2566    import os
2567    from Scientific.IO.NetCDF import NetCDFFile
2568    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2569    from Numeric import allclose, around
2570
2571    precision = Float
2572
2573    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2574
2575    if minlat is not None:
2576        assert -90 < minlat < 90, msg
2577    if maxlat is not None:
2578        assert -90 < maxlat < 90, msg
2579    if minlon is not None:
2580        assert -180 < minlon < 180, msg
2581    if maxlon is not None:
2582        assert -180 < maxlon < 180, msg
2583
2584
2585    #Get NetCDF data
2586    if verbose: print 'Reading files %s_*.nc' %basename_in
[3694]2587    #print "basename_in + '_ha.nc'",basename_in + '_ha.nc'
[2852]2588    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2589    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2590    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2591    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2592
2593    if basename_out is None:
2594        swwname = basename_in + '.sww'
2595    else:
2596        swwname = basename_out + '.sww'
2597
2598    # Get dimensions of file_h
2599    for dimension in file_h.dimensions.keys():
2600        if dimension[:3] == 'LON':
2601            dim_h_longitude = dimension
2602        if dimension[:3] == 'LAT':
2603            dim_h_latitude = dimension
2604        if dimension[:4] == 'TIME':
2605            dim_h_time = dimension
2606
2607    #print 'long:', dim_h_longitude
2608    #print 'lats:', dim_h_latitude
2609    #print 'times:', dim_h_time
2610
2611    times = file_h.variables[dim_h_time]
2612    latitudes = file_h.variables[dim_h_latitude]
2613    longitudes = file_h.variables[dim_h_longitude]
2614   
2615    # Get dimensions for file_e
2616    for dimension in file_e.dimensions.keys():
2617        if dimension[:3] == 'LON':
2618            dim_e_longitude = dimension
2619        if dimension[:3] == 'LAT':
2620            dim_e_latitude = dimension
2621
2622    # Get dimensions for file_u
2623    for dimension in file_u.dimensions.keys():
2624        if dimension[:3] == 'LON':
2625            dim_u_longitude = dimension
2626        if dimension[:3] == 'LAT':
2627            dim_u_latitude = dimension
2628        if dimension[:4] == 'TIME':
2629            dim_u_time = dimension
2630           
2631    # Get dimensions for file_v
2632    for dimension in file_v.dimensions.keys():
2633        if dimension[:3] == 'LON':
2634            dim_v_longitude = dimension
2635        if dimension[:3] == 'LAT':
2636            dim_v_latitude = dimension
2637        if dimension[:4] == 'TIME':
2638            dim_v_time = dimension
2639
2640
2641    #Precision used by most for lat/lon is 4 or 5 decimals
2642    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2643    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2644
2645    #Check that files are compatible
2646    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2647    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2648    assert allclose(latitudes, e_lat)
2649
2650    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2651    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2652    assert allclose(longitudes, e_lon)
2653
2654    if mint is None:
2655        jmin = 0
2656    else:
2657        jmin = searchsorted(times, mint)
2658
2659    if maxt is None:
2660        jmax = len(times)
2661    else:
2662        jmax = searchsorted(times, maxt)
2663
2664    if minlat is None:
2665        kmin = 0
2666    else:
2667        kmin = searchsorted(latitudes, minlat)
2668
2669    if maxlat is None:
2670        kmax = len(latitudes)
2671    else:
2672        kmax = searchsorted(latitudes, maxlat)
2673
2674    if minlon is None:
2675        lmin = 0
2676    else:
2677        lmin = searchsorted(longitudes, minlon)
2678
2679    if maxlon is None:
2680        lmax = len(longitudes)
2681    else:
2682        lmax = searchsorted(longitudes, maxlon)
2683
2684    #print ' j', jmin, jmax
2685    times = times[jmin:jmax]
2686    latitudes = latitudes[kmin:kmax]
2687    longitudes = longitudes[lmin:lmax]
2688
2689
2690    if verbose: print 'cropping'
2691    zname = 'ELEVATION'
2692
2693    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2694    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2695    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2696    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2697
2698    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2699    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2700    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2701    #        from Numeric import asarray
2702    #        elevations=elevations.tolist()
2703    #        elevations.reverse()
2704    #        elevations=asarray(elevations)
2705    #    else:
2706    #        from Numeric import asarray
2707    #        elevations=elevations.tolist()
2708    #        elevations.reverse()
2709    #        elevations=asarray(elevations)
2710    #        'print hmmm'
2711
2712
2713
2714    #Get missing values
2715    nan_ha = file_h.variables['HA'].missing_value[0]
2716    nan_ua = file_u.variables['UA'].missing_value[0]
2717    nan_va = file_v.variables['VA'].missing_value[0]
2718    if hasattr(file_e.variables[zname],'missing_value'):
2719        nan_e  = file_e.variables[zname].missing_value[0]
2720    else:
2721        nan_e = None
2722
2723    #Cleanup
2724    from Numeric import sometrue
2725
2726    missing = (amplitudes == nan_ha)
2727    if sometrue (missing):
2728        if fail_on_NaN:
2729            msg = 'NetCDFFile %s contains missing values'\
2730                  %(basename_in+'_ha.nc')
2731            raise DataMissingValuesError, msg
2732        else:
2733            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2734
2735    missing = (uspeed == nan_ua)
2736    if sometrue (missing):
2737        if fail_on_NaN:
2738            msg = 'NetCDFFile %s contains missing values'\
2739                  %(basename_in+'_ua.nc')
2740            raise DataMissingValuesError, msg
2741        else:
2742            uspeed = uspeed*(missing==0) + missing*NaN_filler
2743
2744    missing = (vspeed == nan_va)
2745    if sometrue (missing):
2746        if fail_on_NaN:
2747            msg = 'NetCDFFile %s contains missing values'\
2748                  %(basename_in+'_va.nc')
2749            raise DataMissingValuesError, msg
2750        else:
2751            vspeed = vspeed*(missing==0) + missing*NaN_filler
2752
2753
2754    missing = (elevations == nan_e)
2755    if sometrue (missing):
2756        if fail_on_NaN:
2757            msg = 'NetCDFFile %s contains missing values'\
2758                  %(basename_in+'_e.nc')
2759            raise DataMissingValuesError, msg
2760        else:
2761            elevations = elevations*(missing==0) + missing*NaN_filler
2762
2763    #######
2764
2765
2766
2767    number_of_times = times.shape[0]
2768    number_of_latitudes = latitudes.shape[0]
2769    number_of_longitudes = longitudes.shape[0]
2770
2771    assert amplitudes.shape[0] == number_of_times
2772    assert amplitudes.shape[1] == number_of_latitudes
2773    assert amplitudes.shape[2] == number_of_longitudes
2774
2775    if verbose:
2776        print '------------------------------------------------'
2777        print 'Statistics:'
2778        print '  Extent (lat/lon):'
2779        print '    lat in [%f, %f], len(lat) == %d'\
2780              %(min(latitudes.flat), max(latitudes.flat),
2781                len(latitudes.flat))
2782        print '    lon in [%f, %f], len(lon) == %d'\
2783              %(min(longitudes.flat), max(longitudes.flat),
2784                len(longitudes.flat))
2785        print '    t in [%f, %f], len(t) == %d'\
2786              %(min(times.flat), max(times.flat), len(times.flat))
2787
2788        q = amplitudes.flat
2789        name = 'Amplitudes (ha) [cm]'
2790        print '    %s in [%f, %f]' %(name, min(q), max(q))
2791
2792        q = uspeed.flat
2793        name = 'Speeds (ua) [cm/s]'
2794        print '    %s in [%f, %f]' %(name, min(q), max(q))
2795
2796        q = vspeed.flat
2797        name = 'Speeds (va) [cm/s]'
2798        print '    %s in [%f, %f]' %(name, min(q), max(q))
2799
2800        q = elevations.flat
2801        name = 'Elevations (e) [m]'
2802        print '    %s in [%f, %f]' %(name, min(q), max(q))
2803
2804
2805    #print number_of_latitudes, number_of_longitudes
2806    number_of_points = number_of_latitudes*number_of_longitudes
2807    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2808
2809
2810    file_h.close()
2811    file_u.close()
2812    file_v.close()
2813    file_e.close()
2814
2815
2816    # NetCDF file definition
2817    outfile = NetCDFFile(swwname, 'w')
2818
2819    #Create new file
2820    outfile.institution = 'Geoscience Australia'
2821    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2822                          %(basename_in + '_ha.nc',
2823                            basename_in + '_ua.nc',
2824                            basename_in + '_va.nc',
2825                            basename_in + '_e.nc')
2826
2827
2828    #For sww compatibility
2829    outfile.smoothing = 'Yes'
2830    outfile.order = 1
2831
2832    #Start time in seconds since the epoch (midnight 1/1/1970)
2833    outfile.starttime = starttime = times[0]
2834    times = times - starttime  #Store relative times
2835
2836    # dimension definitions
2837    outfile.createDimension('number_of_volumes', number_of_volumes)
2838
2839    outfile.createDimension('number_of_vertices', 3)
2840    outfile.createDimension('number_of_points', number_of_points)
2841
2842
2843    #outfile.createDimension('number_of_timesteps', len(times))
2844    outfile.createDimension('number_of_timesteps', len(times))
2845
2846    # variable definitions
2847    outfile.createVariable('x', precision, ('number_of_points',))
2848    outfile.createVariable('y', precision, ('number_of_points',))
2849    outfile.createVariable('elevation', precision, ('number_of_points',))
2850
2851    #FIXME: Backwards compatibility
2852    outfile.createVariable('z', precision, ('number_of_points',))
2853    #################################
2854
2855    outfile.createVariable('volumes', Int, ('number_of_volumes',
2856                                            'number_of_vertices'))
2857
2858    outfile.createVariable('time', precision,
2859                           ('number_of_timesteps',))
2860
2861    outfile.createVariable('stage', precision,
2862                           ('number_of_timesteps',
2863                            'number_of_points'))
2864
2865    outfile.createVariable('xmomentum', precision,
2866                           ('number_of_timesteps',
2867                            'number_of_points'))
2868
2869    outfile.createVariable('ymomentum', precision,
2870                           ('number_of_timesteps',
2871                            'number_of_points'))
2872
2873
2874    #Store
[3514]2875    from anuga.coordinate_transforms.redfearn import redfearn
[2852]2876    x = zeros(number_of_points, Float)  #Easting
2877    y = zeros(number_of_points, Float)  #Northing
2878
2879
2880    if verbose: print 'Making triangular grid'
2881    #Check zone boundaries
2882    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2883
2884    vertices = {}
2885    i = 0
2886    for k, lat in enumerate(latitudes):       #Y direction
2887        for l, lon in enumerate(longitudes):  #X direction
2888
2889            vertices[l,k] = i
2890
2891            zone, easting, northing = redfearn(lat,lon)
2892
2893            msg = 'Zone boundary crossed at longitude = %s' %lon
2894            #assert zone == refzone, msg
2895            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2896            x[i] = easting
2897            y[i] = northing
2898            i += 1
2899
2900
2901    #Construct 2 triangles per 'rectangular' element
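    #
    #  v1=(l,k+1)  v3=(l+1,k+1)
    #     +----------+
    #     |        / |
    #     | upper /  |
    #     |      /   |
    #     |     /    |
    #     |    /     |
    #     |   / lower|
    #     +----------+
    #  v2=(l,k)    v4=(l+1,k)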
2902    volumes = []
2903    for l in range(number_of_longitudes-1):    #X direction
2904        for k in range(number_of_latitudes-1): #Y direction
2905            v1 = vertices[l,k+1]
2906            v2 = vertices[l,k]
2907            v3 = vertices[l+1,k+1]
2908            v4 = vertices[l+1,k]
2909
2910            volumes.append([v1,v2,v3]) #Upper element
2911            volumes.append([v4,v3,v2]) #Lower element
2912
2913    volumes = array(volumes)
2914
2915    if origin is None:
2916        zone = refzone
2917        xllcorner = min(x)
2918        yllcorner = min(y)
2919    else:
2920        zone = origin[0]
2921        xllcorner = origin[1]
2922        yllcorner = origin[2]
2923
2924
2925    outfile.xllcorner = xllcorner
2926    outfile.yllcorner = yllcorner
2927    outfile.zone = zone
2928
2929
2930    if elevation is not None:
2931        z = elevation
2932    else:
2933        if inverted_bathymetry:
2934            z = -1*elevations
2935        else:
2936            z = elevations
2937    #FIXME: z should be obtained from MOST and passed in here
2938
2939    from Numeric import resize
2940    z = resize(z,outfile.variables['z'][:].shape)
2941    outfile.variables['x'][:] = x - xllcorner
2942    outfile.variables['y'][:] = y - yllcorner
2943    outfile.variables['z'][:] = z             #FIXME HACK
2944    outfile.variables['elevation'][:] = z
2945    outfile.variables['time'][:] = times   #Store time relative
2946    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2947
2948
2949
2950    #Time stepping
2951    stage = outfile.variables['stage']
2952    xmomentum = outfile.variables['xmomentum']
2953    ymomentum = outfile.variables['ymomentum']
2954
2955    if verbose: print 'Converting quantities'
2956    n = len(times)
2957    for j in range(n):
2958        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2959        i = 0
2960        for k in range(number_of_latitudes):      #Y direction
2961            for l in range(number_of_longitudes): #X direction
2962                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2963                stage[j,i] = w
2964                h = w - z[i]
2965                xmomentum[j,i] = uspeed[j,k,l]/100*h
2966                ymomentum[j,i] = vspeed[j,k,l]/100*h
2967                i += 1
2968
2969    #outfile.close()
2970
2971    #FIXME: Refactor using code from file_function.statistics
2972    #Something like print swwstats(swwname)
2973    if verbose:
2974        x = outfile.variables['x'][:]
2975        y = outfile.variables['y'][:]
2976        times = outfile.variables['time'][:]
2977        print '------------------------------------------------'
2978        print 'Statistics of output file:'
2979        print '  Name: %s' %swwname
2980        print '  Reference:'
2981        print '    Lower left corner: [%f, %f]'\
2982              %(xllcorner, yllcorner)
2983        print '    Start time: %f' %starttime
2984        print '  Extent:'
2985        print '    x [m] in [%f, %f], len(x) == %d'\
2986              %(min(x.flat), max(x.flat), len(x.flat))
2987        print '    y [m] in [%f, %f], len(y) == %d'\
2988              %(min(y.flat), max(y.flat), len(y.flat))
2989        print '    t [s] in [%f, %f], len(t) == %d'\
2990              %(min(times), max(times), len(times))
2991        print '  Quantities [SI units]:'
2992        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2993            q = outfile.variables[name][:].flat
2994            print '    %s in [%f, %f]' %(name, min(q), max(q))
2995
2996
2997
2998    outfile.close()
2999
3000
3001
3002
3003
3004def timefile2netcdf(filename, quantity_names = None):
3005    """Template for converting typical text files with time series to
3006    NetCDF tms file.
3007
3008
3009    The file format is assumed to be two fields separated by a comma:
3010
3011        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3012
3013    E.g
3014
3015      31/08/04 00:00:00, 1.328223 0 0
3016      31/08/04 00:15:00, 1.292912 0 0
3017
3018    will provide a time dependent function f(t) with three attributes
3019
3020    filename is assumed to be the rootname with extensions .txt and .tms
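
    Example usage (hypothetical filename; a sketch only):

        timefile2netcdf('gauge', quantity_names=['stage'])

    would read gauge.txt and write the time series to gauge.tms.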
3021    """
3022
3023    import time, calendar
3024    from Numeric import array
[3514]3025    from anuga.config import time_format
3026    from anuga.utilities.numerical_tools import ensure_numeric
[2852]3027
3028
3029
3030    fid = open(filename + '.txt')
3031    line = fid.readline()
3032    fid.close()
3033
3034    fields = line.split(',')
3035    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3036    assert len(fields) == 2, msg
3037
3038    try:
3039        starttime = calendar.timegm(time.strptime(fields[0], time_format))
3040    except ValueError:
3041        msg = 'First field in file %s must be' %filename
3042        msg += ' date-time with format %s.\n' %time_format
3043        msg += 'I got %s instead.' %fields[0]
3044        raise DataTimeError, msg
3045
3046
3047    #Split values
3048    values = []
3049    for value in fields[1].split():
3050        values.append(float(value))
3051
3052    q = ensure_numeric(values)
3053
3054    msg = 'ERROR: File must contain at least one independent value'
3055    assert len(q.shape) == 1, msg
3056
3057
3058
3059    #Read times proper
3060    from Numeric import zeros, Float, alltrue
[3514]3061    from anuga.config import time_format
[2852]3062    import time, calendar
3063
3064    fid = open(filename + '.txt')
3065    lines = fid.readlines()
3066    fid.close()
3067
3068    N = len(lines)
3069    d = len(q)
3070
3071    T = zeros(N, Float)       #Time
3072    Q = zeros((N, d), Float)  #Values
3073
3074    for i, line in enumerate(lines):
3075        fields = line.split(',')
3076        realtime = calendar.timegm(time.strptime(fields[0], time_format))
3077
3078        T[i] = realtime - starttime
3079
3080        for j, value in enumerate(fields[1].split()):
3081            Q[i, j] = float(value)
3082
3083    msg = 'File %s must list time as a monotonically ' %filename
3084    msg += 'increasing sequence'
3085    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3086
3087
3088    #Create NetCDF file
3089    from Scientific.IO.NetCDF import NetCDFFile
3090
3091    fid = NetCDFFile(filename + '.tms', 'w')
3092
3093
3094    fid.institution = 'Geoscience Australia'
3095    fid.description = 'Time series'
3096
3097
3098    #Reference point
3099    #Start time in seconds since the epoch (midnight 1/1/1970)
3100    #FIXME: Use Georef
3101    fid.starttime = starttime
3102
3103
3104    # dimension definitions
3105    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3106    #fid.createDimension('number_of_vertices', 3)
3107
3108
3109    fid.createDimension('number_of_timesteps', len(T))
3110
3111    fid.createVariable('time', Float, ('number_of_timesteps',))
3112
3113    fid.variables['time'][:] = T
3114
3115    for i in range(Q.shape[1]):
3116        try:
3117            name = quantity_names[i]
3118        except (TypeError, IndexError):
3119            name = 'Attribute%d' %i
3120
3121        fid.createVariable(name, Float, ('number_of_timesteps',))
3122        fid.variables[name][:] = Q[:,i]
3123
3124    fid.close()
3125
3126
3127def extent_sww(file_name):
3128    """
3129    Read in an sww file.
3130
3131    Input:
3132    file_name - the sww file
3133
3134    Output:
3135    A list [min(x), max(x), min(y), max(y), min(stage), max(stage)]
3136    giving the spatial extent of the mesh and the range of the stage
3137    values over all timesteps.
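
    Example usage (hypothetical filename; a sketch only):

        xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('runup.sww')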
3140    """
3141
3142
3143    from Scientific.IO.NetCDF import NetCDFFile
3144
3145    #Check contents
3146    #Get NetCDF
3147    fid = NetCDFFile(file_name, 'r')
3148
3149    # Get the variables
3150    x = fid.variables['x'][:]
3151    y = fid.variables['y'][:]
3152    stage = fid.variables['stage'][:]
3153    #print "stage",stage
3154    #print "stage.shap",stage.shape
3155    #print "min(stage.flat), max(stage.flat)", min(stage.flat), max(stage.flat)
3156    #print "min(stage)",min(stage)
3157
3158    fid.close()
3159
3160    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3161
3162
3163def sww2domain(filename, boundary=None, t=None,
3164               fail_if_NaN=True, NaN_filler=0,
3165               verbose=False, very_verbose=False):
3166    """
3167    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3168
3169    Boundary is not recommended if domain.smooth is not selected, as it
3170    uses unique coordinates, but not unique boundaries. This means that
3171    the boundary file will not be compatible with the coordinates, and will
3172    give a different final boundary, or crash.
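
    Example usage (hypothetical filename; a sketch only):

        domain = sww2domain('restart.sww', verbose=True)

    would rebuild a shallow water Domain from the last stored timestep,
    e.g. for continuing an interrupted simulation.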
3173    """
3174    NaN=9.969209968386869e+036
3175    #initialise NaN.
3176
3177    from Scientific.IO.NetCDF import NetCDFFile
3178    from shallow_water import Domain
3179    from Numeric import asarray, transpose, resize
3180
3181    if verbose: print 'Reading from ', filename
3182    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3183    time = fid.variables['time']       #Timesteps
3184    if t is None:
3185        t = time[-1]
3186    time_interp = get_time_interp(time,t)
3187
3188    # Get the variables as Numeric arrays
3189    x = fid.variables['x'][:]             #x-coordinates of vertices
3190    y = fid.variables['y'][:]             #y-coordinates of vertices
3191    elevation = fid.variables['elevation']     #Elevation
3192    stage = fid.variables['stage']     #Water level
3193    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3194    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3195
3196    starttime = fid.starttime[0]
3197    volumes = fid.variables['volumes'][:] #Connectivity
3198    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3199
3200    conserved_quantities = []
3201    interpolated_quantities = {}
3202    other_quantities = []
3203
3204    # get geo_reference
3205    #sww files don't have to have a geo_ref
3206    try:
3207        geo_reference = Geo_reference(NetCDFObject=fid)
3208    except: #AttributeError, e:
3209        geo_reference = None
3210
3211    if verbose: print '    getting quantities'
3212    for quantity in fid.variables.keys():
3213        dimensions = fid.variables[quantity].dimensions
3214        if 'number_of_timesteps' in dimensions:
3215            conserved_quantities.append(quantity)
3216            interpolated_quantities[quantity]=\
3217                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3218        else: other_quantities.append(quantity)
3219
3220    other_quantities.remove('x')
3221    other_quantities.remove('y')
3222    other_quantities.remove('z')
3223    other_quantities.remove('volumes')
3224
3225    conserved_quantities.remove('time')
3226
3227    if verbose: print '    building domain'
3228    #    From domain.Domain:
3229    #    domain = Domain(coordinates, volumes,\
3230    #                    conserved_quantities = conserved_quantities,\
3231    #                    other_quantities = other_quantities,zone=zone,\
3232    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3233
3234    #   From shallow_water.Domain:
3235    coordinates=coordinates.tolist()
3236    volumes=volumes.tolist()
3237    #FIXME:should this be in mesh?(peter row)
3238    if fid.smoothing == 'Yes': unique = False
3239    else: unique = True
3240    if unique:
3241        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3242
3243
3244    try:
3245        domain = Domain(coordinates, volumes, boundary)
3246    except AssertionError, e:
3247        fid.close()
3248        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3249        raise DataDomainError, msg
3250
3251    if not boundary is None:
3252        domain.boundary = boundary
3253
3254    domain.geo_reference = geo_reference
3255
3256    domain.starttime=float(starttime)+float(t)
3257    domain.time=0.0
3258
3259    for quantity in other_quantities:
3260        try:
3261            NaN = fid.variables[quantity].missing_value
3262        except:
3263            pass #quantity has no missing_value number
3264        X = fid.variables[quantity][:]
3265        if very_verbose:
3266            print '       ',quantity
3267            print '        NaN =',NaN
3268            print '        max(X)'
3269            print '       ',max(X)
3270            print '        max(X)==NaN'
3271            print '       ',max(X)==NaN
3272            print ''
3273        if (max(X)==NaN) or (min(X)==NaN):
3274            if fail_if_NaN:
3275                msg = 'quantity "%s" contains no_data entry'%quantity
3276                raise DataMissingValuesError, msg
3277            else:
3278                data = (X != NaN)
3279                X = (X*data)+(data==0)*NaN_filler
3280        if unique:
3281            X = resize(X,(len(X)/3,3))
3282        domain.set_quantity(quantity,X)
3283    #
3284    for quantity in conserved_quantities:
3285        try:
3286            NaN = fid.variables[quantity].missing_value
3287        except:
3288            pass #quantity has no missing_value number
3289        X = interpolated_quantities[quantity]
3290        if very_verbose:
3291            print '       ',quantity
3292            print '        NaN =',NaN
3293            print '        max(X)'
3294            print '       ',max(X)
3295            print '        max(X)==NaN'
3296            print '       ',max(X)==NaN
3297            print ''
3298        if (max(X)==NaN) or (min(X)==NaN):
3299            if fail_if_NaN:
3300                msg = 'quantity "%s" contains no_data entry'%quantity
3301                raise DataMissingValuesError, msg
3302            else:
3303                data = (X != NaN)
3304                X = (X*data)+(data==0)*NaN_filler
3305        if unique:
3306            X = resize(X,(X.shape[0]/3,3))
3307        domain.set_quantity(quantity,X)
3308
3309    fid.close()
3310    return domain
3311
3312def interpolated_quantity(saved_quantity,time_interp):
3313
3314    #given an index and ratio, interpolate quantity with respect to time.
3315    index,ratio = time_interp
3316    Q = saved_quantity
3317    if ratio > 0:
3318        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3319    else:
3320        q = Q[index]
3321    #Return vector of interpolated values
3322    return q
3323
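# A minimal usage sketch (the array below is made up for illustration:
# two timesteps, two points):
#
#     >>> from Numeric import array
#     >>> Q = array([[0.0, 0.0], [10.0, 20.0]])
#     >>> interpolated_quantity(Q, (0, 0.25))   # => [2.5, 5.0]
#     >>> interpolated_quantity(Q, (1, 0))      # ratio 0 => Q[1] as is
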
3324def get_time_interp(time,t=None):
3325    #Finds the ratio and index for time interpolation.
[3560]3326    #It is borrowed from previous abstract_2d_finite_volumes code.
[2852]3327    if t is None:
3328        t=time[-1]
3329        index = -1
3330        ratio = 0.
3331    else:
3332        T = time
3333        tau = t
3334        index=0
3335        msg = 'Time interval derived from file %s [%s:%s]'\
3336            %('FIXMEfilename', T[0], T[-1])
3337        msg += ' does not match model time: %s' %tau
3338        if tau < time[0]: raise DataTimeError, msg
3339        if tau > time[-1]: raise DataTimeError, msg
3340        while tau > time[index]: index += 1
3341        while tau < time[index]: index -= 1
3342        if tau == time[index]:
3343            #Protect against case where tau == time[-1] (last time)
3344            # - also works in general when tau == time[i]
3345            ratio = 0
3346        else:
3347            #t is now between index and index+1
3348            ratio = (tau - time[index])/(time[index+1] - time[index])
3349    return (index,ratio)
3350
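# A minimal usage sketch with made-up model times:
#
#     >>> time = [0.0, 10.0, 20.0]
#     >>> get_time_interp(time, 15.0)   # => (1, 0.5), halfway between 10 and 20
#     >>> get_time_interp(time)         # => (-1, 0.0), defaults to last time
#     >>> get_time_interp(time, 25.0)   # raises DataTimeError (outside range)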
3351
3352def weed(coordinates,volumes,boundary = None):
3353    if type(coordinates).__name__ == 'array':
3354        coordinates = coordinates.tolist()
3355    if type(volumes).__name__ == 'array':
3356        volumes = volumes.tolist()
3357
3358    unique = False
3359    point_dict = {}
3360    same_point = {}
3361    for i in range(len(coordinates)):
3362        point = tuple(coordinates[i])
3363        if point_dict.has_key(point):
3364            unique = True
3365            same_point[i]=point
3366            #to change all point i references to point j
3367        else:
3368            point_dict[point]=i
3369            same_point[i]=point
3370
3371    coordinates = []
3372    i = 0
3373    for point in point_dict.keys():
3374        point = tuple(point)
3375        coordinates.append(list(point))
3376        point_dict[point]=i
3377        i+=1
3378
3379
3380    for volume in volumes:
3381        for i in range(len(volume)):
3382            index = volume[i]
3383            if index>-1:
3384                volume[i]=point_dict[same_point[index]]
3385
3386    new_boundary = {}
3387    if boundary is not None:
3388        for segment in boundary.keys():
3389            point0 = point_dict[same_point[segment[0]]]
3390            point1 = point_dict[same_point[segment[1]]]
3391            label = boundary[segment]
3392            #FIXME should the boundary attributes be concatenated
3393            #('exterior, pond') or replaced ('pond')? (peter row)
3394
3395            if new_boundary.has_key((point0,point1)):
3396                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3397                                              #+','+label
3398
3399            elif new_boundary.has_key((point1,point0)):
3400                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3401                                              #+','+label
3402            else: new_boundary[(point0,point1)]=label
3403
3404        boundary = new_boundary
3405
3406    return coordinates,volumes,boundary
3407
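# A minimal usage sketch: coordinate 3 duplicates coordinate 1, so it is
# weeded out and the triangles are renumbered accordingly:
#
#     >>> coords = [[0.0,0.0], [1.0,0.0], [0.0,1.0], [1.0,0.0]]
#     >>> vols = [[0,1,2], [2,3,0]]
#     >>> coords, vols, b = weed(coords, vols)
#     >>> len(coords)   # => 3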
3408
3409def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3410                 verbose=False):
3411    """Read Digitial Elevation model from the following NetCDF format (.dem)
3412
3413    Example:
3414
3415    ncols         3121
3416    nrows         1800
3417    xllcorner     722000
3418    yllcorner     5893000
3419    cellsize      25
3420    NODATA_value  -9999
3421    138.3698 137.4194 136.5062 135.5558 ..........
3422
3423    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3424    """
3425
3426    import os
3427    from Scientific.IO.NetCDF import NetCDFFile
3428    from Numeric import Float, zeros, sum, reshape, equal
3429
3430    root = basename_in
3431    inname = root + '.dem'
3432
3433    #Open existing netcdf file to read
3434    infile = NetCDFFile(inname, 'r')
3435    if verbose: print 'Reading DEM from %s' %inname
3436
3437    #Read metadata
3438    ncols = infile.ncols[0]
3439    nrows = infile.nrows[0]
3440    xllcorner = infile.xllcorner[0]
3441    yllcorner = infile.yllcorner[0]
3442    cellsize = infile.cellsize[0]
3443    NODATA_value = infile.NODATA_value[0]
3444    zone = infile.zone[0]
3445    false_easting = infile.false_easting[0]
3446    false_northing = infile.false_northing[0]
3447    projection = infile.projection
3448    datum = infile.datum
3449    units = infile.units
3450
3451    dem_elevation = infile.variables['elevation']
3452
3453    #Get output file name
3454    if basename_out is None:
3455        outname = root + '_' + repr(cellsize_new) + '.dem'
3456    else:
3457        outname = basename_out + '.dem'
3458
3459    if verbose: print 'Write decimated NetCDF file to %s' %outname
3460
3461    #Determine some dimensions for decimated grid
3462    (nrows_stencil, ncols_stencil) = stencil.shape
3463    x_offset = ncols_stencil / 2
3464    y_offset = nrows_stencil / 2
3465    cellsize_ratio = int(cellsize_new / cellsize)
3466    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3467    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3468
3469    #Open netcdf file for output
3470    outfile = NetCDFFile(outname, 'w')
3471
3472    #Create new file
3473    outfile.institution = 'Geoscience Australia'
3474    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3475                           'of spatial point data'
3476    #Georeferencing
3477    outfile.zone = zone
3478    outfile.projection = projection
3479    outfile.datum = datum
3480    outfile.units = units
3481
3482    outfile.cellsize = cellsize_new
3483    outfile.NODATA_value = NODATA_value
3484    outfile.false_easting = false_easting
3485    outfile.false_northing = false_northing
3486
3487    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3488    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3489    outfile.ncols = ncols_new
3490    outfile.nrows = nrows_new
3491
3492    # dimension definition
3493    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3494
3495    # variable definition
3496    outfile.createVariable('elevation', Float, ('number_of_points',))
3497
3498    # Get handle to the variable
3499    elevation = outfile.variables['elevation']
3500
3501    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3502
3503    #Store data
3504    global_index = 0
3505    for i in range(nrows_new):
3506        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3507        lower_index = global_index
3508        telev =  zeros(ncols_new, Float)
3509        local_index = 0
3510        trow = i * cellsize_ratio
3511
3512        for j in range(ncols_new):
3513            tcol = j * cellsize_ratio
3514            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3515
3516            #if dem contains 1 or more NODATA_values set value in
3517            #decimated dem to NODATA_value, else compute decimated
3518            #value using stencil
3519            if sum(sum(equal(tmp, NODATA_value))) > 0:
3520                telev[local_index] = NODATA_value
3521            else:
3522                telev[local_index] = sum(sum(tmp * stencil))
3523
3524            global_index += 1
3525            local_index += 1
3526
3527        upper_index = global_index
3528
3529        elevation[lower_index:upper_index] = telev
3530
3531    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3532
3533    infile.close()
3534    outfile.close()
3535
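# A minimal usage sketch with a plain 3x3 averaging stencil;
# 'my_dem' is a hypothetical basename (my_dem.dem must exist):
#
#     >>> from Numeric import ones, Float
#     >>> stencil = ones((3,3), Float)/9.0
#     >>> decimate_dem('my_dem', stencil, cellsize_new=100)
#
# With basename_out omitted this writes my_dem_100.dem.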
3536
3537
3538
3539def tsh2sww(filename, verbose=False): #test_tsh2sww
3540    """
3541    to check if a tsh/msh file 'looks' good.
3542    """
3543
3544    from shallow_water import Domain
[3560]3545    from anuga.abstract_2d_finite_volumes.pmesh2domain import pmesh_to_domain_instance
[2852]3546    import time, os
[3563]3547    #from data_manager import get_dataobject
[2852]3548    from os import sep, path
[3514]3549    from anuga.utilities.numerical_tools import mean
[2852]3550
3551    if verbose: print 'Creating domain from', filename
3552    domain = pmesh_to_domain_instance(filename, Domain)
3553    if verbose: print "Number of triangles = ", len(domain)
3554
3555    domain.smooth = True
3556    domain.format = 'sww'   #Native netcdf visualisation format
3557    file_path, filename = path.split(filename)
3558    filename, ext = path.splitext(filename)
3559    domain.filename = filename
3560    domain.reduction = mean
3561    if verbose: print "file_path", file_path
3562    if file_path == "": file_path = "."
3563    domain.set_datadir(file_path)
3564
3565    if verbose:
3566        print "Output written to " + domain.get_datadir() + sep + \
3567              domain.filename + "." + domain.format
3568    sww = get_dataobject(domain)
3569    sww.store_connectivity()
3570    sww.store_timestep('stage')
3571
3572
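# A minimal usage sketch; 'mesh.tsh' is a hypothetical mesh file:
#
#     >>> tsh2sww('mesh.tsh', verbose=True)
#
# This writes mesh.sww to the same directory as the input file.
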
3573def asc_csiro2sww(bath_dir,
3574                  elevation_dir,
3575                  ucur_dir,
3576                  vcur_dir,
3577                  sww_file,
3578                  minlat = None, maxlat = None,
3579                  minlon = None, maxlon = None,
3580                  zscale=1,
3581                  mean_stage = 0,
3582                  fail_on_NaN = True,
3583                  elevation_NaN_filler = 0,
3584                  bath_prefix='ba',
3585                  elevation_prefix='el',
3586                  verbose=False):
3587    """
3588    Produce an sww boundary file from ESRI ASCII data from CSIRO.
3589
3590    Also convert latitude and longitude to UTM. All coordinates are
3591    assumed to be given in the GDA94 datum.
3592
3593    Assume:
3594    All files are in ESRI ASCII format
3595
3596    4 types of information
3597    bathymetry
3598    elevation
3599    u velocity
3600    v velocity
3601
3602    Assumptions
3603    The metadata of all the files is the same
3604    Each type is in a separate directory
3605    One bath file with extension .000
3606    The time period is less than 24hrs and uniform.
3607    """
3608    from Scientific.IO.NetCDF import NetCDFFile
3609
[3514]3610    from anuga.coordinate_transforms.redfearn import redfearn
[2852]3611
3612    precision = Float # So if we want to change the precision, it's done here
3613
3614    # go in to the bath dir and load the only file,
3615    bath_files = os.listdir(bath_dir)
3616    #print "bath_files",bath_files
3617
3618    #fixme: make more general?
3619    bath_file = bath_files[0]
3620    bath_dir_file =  bath_dir + os.sep + bath_file
3621    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3622    #print "bath_metadata",bath_metadata
3623    #print "bath_grid",bath_grid
3624
3625    #Use the date.time of the bath file as a basis for
3626    #the start time for other files
3627    base_start = bath_file[-12:]
3628
3629    #go into the elevation dir and load the 000 file
3630    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3631                         + base_start
3632    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
3633    #print "elevation_dir_file",elevation_dir_file
3634    #print "elevation_grid", elevation_grid
3635
3636    elevation_files = os.listdir(elevation_dir)
3637    ucur_files = os.listdir(ucur_dir)
3638    vcur_files = os.listdir(vcur_dir)
3639
3640    # the first elevation file should be the
3641    # file with the same base name as the bath data
3642    #print "elevation_files[0]",elevation_files[0]
3643    #print "'el' + base_start",'el' + base_start
3644    assert elevation_files[0] == elevation_prefix + base_start
3645
3646    #assert bath_metadata == elevation_metadata
3647
3648
3649
3650    number_of_latitudes = bath_grid.shape[0]
3651    number_of_longitudes = bath_grid.shape[1]
3652    #number_of_times = len(os.listdir(elevation_dir))
3653    #number_of_points = number_of_latitudes*number_of_longitudes
3654    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3655
3656    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3657                  for x in range(number_of_longitudes)]
3658    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3659                 for y in range(number_of_latitudes)]
3660
3661    # Reverse order of lat, so the first lat represents the first grid row
3662    latitudes.reverse()
3663
3664    #print "latitudes - before _get_min_max_indexes",latitudes
3665    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
3666                                                 minlat=minlat, maxlat=maxlat,
3667                                                 minlon=minlon, maxlon=maxlon)
3668
3669
3670    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3671    #print "bath_grid",bath_grid
3672    latitudes = latitudes[kmin:kmax]
3673    longitudes = longitudes[lmin:lmax]
3674    number_of_latitudes = len(latitudes)
3675    number_of_longitudes = len(longitudes)
3676    number_of_times = len(os.listdir(elevation_dir))
3677    number_of_points = number_of_latitudes*number_of_longitudes
3678    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3679    #print "number_of_points",number_of_points
3680
3681    #Work out the times
3682    if len(elevation_files) > 1:
3683        # Assume: The time period is less than 24hrs.
3684        time_period = (int(elevation_files[1][-3:]) - \
3685                      int(elevation_files[0][-3:]))*60*60
3686        times = [x*time_period for x in range(len(elevation_files))]
3687    else:
3688        times = [0.0]
3689    #print "times", times
3690    #print "number_of_latitudes", number_of_latitudes
3691    #print "number_of_longitudes", number_of_longitudes
3692    #print "number_of_times", number_of_times
3693    #print "latitudes", latitudes
3694    #print "longitudes", longitudes
3695
3696
3697    if verbose:
3698        print '------------------------------------------------'
3699        print 'Statistics:'
3700        print '  Extent (lat/lon):'
3701        print '    lat in [%f, %f], len(lat) == %d'\
3702              %(min(latitudes), max(latitudes),
3703                len(latitudes))
3704        print '    lon in [%f, %f], len(lon) == %d'\
3705              %(min(longitudes), max(longitudes),
3706                len(longitudes))
3707        print '    t in [%f, %f], len(t) == %d'\
3708              %(min(times), max(times), len(times))
3709
3710    ######### WRITE THE SWW FILE #############
3711    # NetCDF file definition
3712    outfile = NetCDFFile(sww_file, 'w')
3713
3714    #Create new file
3715    outfile.institution = 'Geoscience Australia'
3716    outfile.description = 'Converted from XXX'
3717
3718
3719    #For sww compatibility
3720    outfile.smoothing = 'Yes'
3721    outfile.order = 1
3722
3723    #Start time in seconds since the epoch (midnight 1/1/1970)
3724    outfile.starttime = starttime = times[0]
3725
3726
3727    # dimension definitions
3728    outfile.createDimension('number_of_volumes', number_of_volumes)
3729
3730    outfile.createDimension('number_of_vertices', 3)
3731    outfile.createDimension('number_of_points', number_of_points)
3732    outfile.createDimension('number_of_timesteps', number_of_times)
3733
3734    # variable definitions
3735    outfile.createVariable('x', precision, ('number_of_points',))
3736    outfile.createVariable('y', precision, ('number_of_points',))
3737    outfile.createVariable('elevation', precision, ('number_of_points',))
3738
3739    #FIXME: Backwards compatibility
3740    outfile.createVariable('z', precision, ('number_of_points',))
3741    #################################
3742
3743    outfile.createVariable('volumes', Int, ('number_of_volumes',
3744                                            'number_of_vertices'))
3745
3746    outfile.createVariable('time', precision,
3747                           ('number_of_timesteps',))
3748
3749    outfile.createVariable('stage', precision,
3750                           ('number_of_timesteps',
3751                            'number_of_points'))
3752
3753    outfile.createVariable('xmomentum', precision,
3754                           ('number_of_timesteps',
3755                            'number_of_points'))
3756
3757    outfile.createVariable('ymomentum', precision,
3758                           ('number_of_timesteps',
3759                            'number_of_points'))
3760
3761    #Store
[2852]3763    x = zeros(number_of_points, Float)  #Easting
3764    y = zeros(number_of_points, Float)  #Northing
3765
3766    if verbose: print 'Making triangular grid'
3767    #Get zone of 1st point.
3768    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3769
3770    vertices = {}
3771    i = 0
3772    for k, lat in enumerate(latitudes):
3773        for l, lon in enumerate(longitudes):
3774
3775            vertices[l,k] = i
3776
3777            zone, easting, northing = redfearn(lat,lon)
3778
3779            msg = 'Zone boundary crossed at longitude = %s' %lon
3780            #assert zone == refzone, msg
3781            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3782            x[i] = easting
3783            y[i] = northing
3784            i += 1
3785
3786
3787    #Construct 2 triangles per 'rectangular' element
3788    volumes = []
3789    for l in range(number_of_longitudes-1):    #X direction
3790        for k in range(number_of_latitudes-1): #Y direction
3791            v1 = vertices[l,k+1]
3792            v2 = vertices[l,k]
3793            v3 = vertices[l+1,k+1]
3794            v4 = vertices[l+1,k]
3795
3796            #Note, this is different to the ferret2sww code
3797            #since the order of the lats is reversed.
3798            volumes.append([v1,v3,v2]) #Upper element
3799            volumes.append([v4,v2,v3]) #Lower element
3800
3801    volumes = array(volumes)
3802
3803    geo_ref = Geo_reference(refzone,min(x),min(y))
3804    geo_ref.write_NetCDF(outfile)
3805
3806    # This will put the geo ref in the middle
3807    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3808
3809
3810    if verbose:
3811        print '------------------------------------------------'
3812        print 'More Statistics:'
3813        print '  Extent (easting/northing):'
3814        print '    x in [%f, %f], len(x) == %d'\
3815              %(min(x), max(x),
3816                len(x))
3817        print '    y in [%f, %f], len(y) == %d'\
3818              %(min(y), max(y),
3819                len(y))
3820        print 'geo_ref: ',geo_ref
3821
3822    z = resize(bath_grid,outfile.variables['z'][:].shape)
3823    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3824    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3825    outfile.variables['z'][:] = z
3826    outfile.variables['elevation'][:] = z  #FIXME HACK
3827    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3828
3829    # do this to create an ok sww file.
3830    #outfile.variables['time'][:] = [0]   #Store time relative
3831    #outfile.variables['stage'] = z
3832    # put the next line up in the code after outfile.order = 1
3833    #number_of_times = 1
3834
3835    stage = outfile.variables['stage']
3836    xmomentum = outfile.variables['xmomentum']
3837    ymomentum = outfile.variables['ymomentum']
3838
3839    outfile.variables['time'][:] = times   #Store time relative
3840
3841    if verbose: print 'Converting quantities'
3842    n = number_of_times
3843    for j in range(number_of_times):
3844        # load in files
3845        elevation_meta, elevation_grid = \
3846           _read_asc(elevation_dir + os.sep + elevation_files[j])
3847
3848        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3849        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3850
3851        #print "elevation_grid",elevation_grid
3852        #cut matrix to desired size
3853        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3854        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3855        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3856        #print "elevation_grid",elevation_grid
3857        # handle missing values
3858        missing = (elevation_grid == elevation_meta['NODATA_value'])
3859        if sometrue (missing):
3860            if fail_on_NaN:
3861                msg = 'File %s contains missing values'\
3862                      %(elevation_files[j])
3863                raise DataMissingValuesError, msg
3864            else:
3865                elevation_grid = elevation_grid*(missing==0) + \
3866                                 missing*elevation_NaN_filler
3867
3868
3869        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3870        i = 0
3871        for k in range(number_of_latitudes):      #Y direction
3872            for l in range(number_of_longitudes): #X direction
3873                w = zscale*elevation_grid[k,l] + mean_stage
3874                stage[j,i] = w
3875                h = w - z[i]
3876                xmomentum[j,i] = u_momentum_grid[k,l]*h
3877                ymomentum[j,i] = v_momentum_grid[k,l]*h
3878                i += 1
3879    outfile.close()
3880
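# A minimal usage sketch; the directory and file names are hypothetical and
# each directory is assumed to hold ESRI ASCII files as described in the
# docstring (e.g. ba.000 in the bathymetry directory):
#
#     >>> asc_csiro2sww('bath_dir', 'elevation_dir', 'ucur_dir', 'vcur_dir',
#     ...               'output.sww',
#     ...               minlat=-38.0, maxlat=-35.0,
#     ...               minlon=148.0, maxlon=151.0,
#     ...               verbose=True)
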
3881def _get_min_max_indexes(latitudes,longitudes,
3882                        minlat=None, maxlat=None,
3883                        minlon=None, maxlon=None):
3884    """
3885    return max, min indexes (for slicing) of the lat and long arrays to cover the area
3886    specified with min/max lat/long
3887
3888    Think of the latitudes and longitudes describing a 2d surface.
3889    The area returned is, if possible, just big enough to cover the
3890    inputed max/min area. (This will not be possible if the max/min area
3891    has a section outside of the latitudes/longitudes area.)
3892
3893    assume latitudess & longitudes are sorted,
3894    long - from low to high (west to east, eg 148 - 151)
3895    lat - from high to low (north to south, eg -35 - -38)
3896    """
3897
3898    # reverse order of lat, so it's in ascending order
3899    latitudes.reverse()
3900    largest_lat_index = len(latitudes)-1
3901    #Cut out a smaller extent.
3902    if minlat is None:
3903        lat_min_index = 0
3904    else:
3905        lat_min_index = searchsorted(latitudes, minlat)-1
3906        if lat_min_index < 0:
3907            lat_min_index = 0
3908
3909
3910    if maxlat is None:
3911        lat_max_index = largest_lat_index #len(latitudes)
3912    else:
3913        lat_max_index = searchsorted(latitudes, maxlat)
3914        if lat_max_index > largest_lat_index:
3915            lat_max_index = largest_lat_index
3916
3917    if minlon is None:
3918        lon_min_index = 0
3919    else:
3920        lon_min_index = searchsorted(longitudes, minlon)-1
3921        if lon_min_index < 0:
3922            lon_min_index = 0
3923
3924    if maxlon is None:
3925        lon_max_index = len(longitudes)
3926    else:
3927        lon_max_index = searchsorted(longitudes, maxlon)
3928
3929    #Take into account that the latitude list was reversed
3930    latitudes.reverse() # Python passes by reference, need to swap back
3931    lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3932                                   largest_lat_index - lat_min_index
3933    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3934    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3935
3936    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3937
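# A minimal usage sketch; lats are sorted north to south as the function
# assumes, and the returned indexes give the smallest covering slices:
#
#     >>> lats = [-35.0, -36.0, -37.0, -38.0]
#     >>> lons = [148.0, 149.0, 150.0, 151.0]
#     >>> kmin, kmax, lmin, lmax = _get_min_max_indexes(lats, lons,
#     ...     minlat=-36.5, maxlat=-35.5, minlon=148.5, maxlon=150.5)
#     >>> lats[kmin:kmax]   # => [-35.0, -36.0, -37.0]
#     >>> lons[lmin:lmax]   # => [148.0, 149.0, 150.0, 151.0]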
3938
3939def _read_asc(filename, verbose=False):
3940    """Read esri file from the following ASCII format (.asc)
3941
3942    Example:
3943
3944    ncols         3121
3945    nrows         1800
3946    xllcorner     722000
3947    yllcorner     5893000
3948    cellsize      25
3949    NODATA_value  -9999
3950    138.3698 137.4194 136.5062 135.5558 ..........
3951
3952    """
3953
3954    datafile = open(filename)
3955
3956    if verbose: print 'Reading DEM from %s' %(filename)
3957    lines = datafile.readlines()
3958    datafile.close()
3959
3960    if verbose: print 'Got', len(lines), ' lines'
3961
3962    ncols = int(lines.pop(0).split()[1].strip())
3963    nrows = int(lines.pop(0).split()[1].strip())
3964    xllcorner = float(lines.pop(0).split()[1].strip())
3965    yllcorner = float(lines.pop(0).split()[1].strip())
3966    cellsize = float(lines.pop(0).split()[1].strip())
3967    NODATA_value = float(lines.pop(0).split()[1].strip())
3968
3969    assert len(lines) == nrows
3970
3971    #Store data
3972    grid = []
3973
3974    n = len(lines)
3975    for i, line in enumerate(lines):
3976        cells = line.split()
3977        assert len(cells) == ncols
3978        grid.append(array([float(x) for x in cells]))
3979    grid = array(grid)
3980
3981    return {'xllcorner':xllcorner,
3982            'yllcorner':yllcorner,
3983            'cellsize':cellsize,
3984            'NODATA_value':NODATA_value}, grid
3985
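# A minimal usage sketch; 'dem.asc' is a hypothetical file in the format
# shown in the docstring above:
#
#     >>> metadata, grid = _read_asc('dem.asc', verbose=True)
#     >>> metadata['cellsize']   # => 25.0 for the docstring example
#     >>> grid.shape             # => (nrows, ncols), e.g. (1800, 3121)
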
[2884]3986
3987# FIXME (Ole): Is this doing anything at all?
[2852]3988def sww2timeseries(swwfile,
3989                   gauge_filename,
3990                   gauge_data_outname,
3991                   quantity = None,
3992                   time_min = None,
3993                   time_max = None,
3994                   verbose = False):
3995
3996    """Read SWW file and extract time series for prescribed quantities at
3997    gauge locations.
3998
3999    The gauge locations are defined in gauge_filename. This file should be
4000    in the form: gaugename, easting, northing, and should be stored in a
4001    .csv or .xya file.
4002
4003    Time series data at the gauges can be written to file (for example,
4004    Benfield requested raw data) with the default being no data written.
4005
4006    The parameter quantity must be the name of an existing quantity or
4007    an expression involving existing quantities. The default is
4008    'depth'.
4009
4010    The user can define a list of quantities. The possibilities are
4011    the conserved quantitues of the shallow water wave equation and other
4012    quantities which can be derived from those, i.e.
4013    ['depth', 'xmomentum', 'ymomentum', 'momentum', 'velocity', 'bearing'].
4014
4015    Momentum is the absolute momentum, sqrt(xmomentum^2 + ymomentum^2).
4016    Note, units of momentum are m^2/s and depth is m.
4017
4018    Velocity is absolute momentum divided by depth. (Confirming correct units:
4019    vel = abs mom / depth = (m^2/s)/m = m/s.)
4020
4021    Bearing returns the angle of the velocity vector from North.
4022
4023    If time_min and time_max are given, output is plotted in that time range.
4024    The default is to plot the entire time range evident in the sww file.
4025
4026    The exported graphic format is 'png' and it will be stored in the same
4027    directory as the input file.
4028    """
4029
4030    if quantity is None: quantity = 'depth'
4031
4032    # extract gauge locations from gauge file
4033
4034    # extract all quantities from swwfile (f = file_function)
[2884]4035    #if time_min is None: time_min = min(f.get_time())  #FIXME: f is not defined yet
4036    #if time_max is None: time_max = max(f.get_time())  #FIXME: f is not defined yet
[2852]4037
4038    # loop through appropriate range of time
4039    # plot prescribed quantities and export data if requested
4040
4041    #if gauge_data_outname is None:
4042
4043    return
[3292]4044
4045#-------------------------------------------------------------
4046if __name__ == "__main__":
[3296]4047    pass
[3292]4048