source: anuga_core/source/anuga/pyvolution/data_manager.py @ 3546

Last change on this file since 3546 was 3535, checked in by duncan, 19 years ago

change imports so they reflect the new structure

File size: 122.7 KB
"""data_manager.py - input/output for AnuGA


This module takes care of reading and writing datafiles such as topographies,
model output, etc.


Formats used within AnuGA:

.sww: NetCDF format for storing model output f(t,x,y)
.tms: NetCDF format for storing time series f(t)

.xya: ASCII format for storing arbitrary points and associated attributes
.pts: NetCDF format for storing arbitrary points and associated attributes

.asc: ASCII format of regular DEMs as output from ArcView
.prj: Associated ArcView file giving more meta data for asc format
.ers: ERMapper header format of regular DEMs for ArcView

.dem: NetCDF representation of regular DEM data

.tsh: ASCII format for storing meshes and associated boundary and region info
.msh: NetCDF format for storing meshes and associated boundary and region info

.nc: Native Ferret NetCDF format
.geo: Houdini's ASCII geometry format (?)


A typical dataflow can be described as follows:

Manually created files:
ASC, PRJ:     Digital elevation models (gridded)
TSH:          Triangular meshes (e.g. created from anuga.pmesh)
NC:           Model outputs for use as boundary conditions (e.g. from MOST)


AUTOMATICALLY CREATED FILES:

ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEMs to native pts file

NC -> SWW: Conversion of MOST boundary files to boundary sww

PTS + TSH -> TSH with elevation: Least squares fit

TSH -> SWW: Conversion of TSH to sww viewable using Swollen

TSH + Boundary SWW -> SWW: Simulation using pyvolution

"""
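
# Illustrative sketch of the dataflow above, using functions defined further
# down in this module (the base names 'topo' and 'run1' are hypothetical):
#
#     dem2pts('topo')                    # topo.dem -> topo.pts
#     sww2dem('run1',                    # run1.sww -> run1.asc (or .ers)
#             quantity='elevation',
#             cellsize=25,
#             format='asc')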

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import csv
import os

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around


from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data
from anuga.config import minimum_sww_depth

def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s
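
# Example of the transformation performed (illustrative):
#
#     make_filename(' Model (final) output ')   # -> 'Model_final_output'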


def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    # Check both read and write access (R_OK|W_OK)
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return path



def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from anuga.config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN
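
# For example, on a Unix system with a writable 'data' directory
# (illustrative):
#
#     create_filename('data', 'run', 'sww', size=4, time=1.5)
#     # -> 'data/run_size4_time1.50.sww'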


def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from anuga.config import data_dir

    dir = check_dir(datadir)

    # check_dir already appends a trailing separator, and create_filename
    # writes names of the form <filename>_size<size>...<format>
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
                                        '''   'w' (write)''' +\
                                        '''   'r' (read)''' +\
                                        '''   'a' (append)'''

        #Create filename
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))


        self.filename = create_filename(domain.get_datadir(),
            domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain


        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_sww_depth'):
            self.minimum_sww_depth = domain.minimum_sww_depth
        else:
            self.minimum_sww_depth = minimum_sww_depth

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from anuga.pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()

    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        minimum_sww_depth = 0.001
        #minimum_sww_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #I change the domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure =\
                Data_format_sww(self.domain, mode=self.mode,
                                max_size = self.max_size,
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    A = choose( A-z[:] >= self.minimum_sww_depth,
                                (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

            #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()

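
# Typical write cycle for an sww writer (sketch; 'domain' is assumed to be an
# initialised pyvolution Domain whose format is 'sww'):
#
#     writer = get_dataobject(domain)    # -> Data_format_sww instance
#     writer.store_connectivity()        # static mesh and elevation, once
#     # then, once per timestep:
#     writer.store_timestep(['stage', 'xmomentum', 'ymomentum'])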



#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates of triangles and their connectivity.

        Together these constitute the mesh on which the checkpointed
        quantities are defined.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        # Note: the checkpoint file has no static elevation variable;
        # elevation is stored per timestep along with the other quantities.

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        var = fid.variables[name]  #Variable holding the named quantity
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision = self.precision)

        var[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()




#Function for storing xya output
#FIXME Not done yet for this version
#This is obsolete.  Use geo_spatial_data instead
class Data_format_xya(Data_format):
    """Generic interface to data formats
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME - This is the old xya format
    def store_all(self):
        """Specialisation of store_all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes are assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file
        """
        pass


#### NBED national exposure database

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self, file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.

        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad, the
        info to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.
        """
        self._file_name = file_name
        self._geospatial = None #

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
            self._load_exposure_csv(self._file_name)
        try:
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if is_x_y_locations is not False:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                                               longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #fixme throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = 'Could not find location columns %s and %s' \
                      %(x_title, y_title)
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result != 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result != 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic,
                                         other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result != 0:
                    return result
            return 0
        else:
            return 1

    def _load_exposure_csv(self, file_name):
        """
        Load in the csv as a dic, title as key and column info as value.
        Also, create a dic, title as key and column index as value.
        """
        #
        attribute_dic = {}
        title_index_dic = {}
        titles_stripped = [] # list of titles

        reader = csv.reader(file(file_name))

        # Read in and manipulate the title info
        titles = reader.next()
        for i, title in enumerate(titles):
            titles_stripped.append(title.strip())
            title_index_dic[title.strip()] = i
        title_count = len(titles_stripped)
        #print "title_index_dic",title_index_dic


        #create a dic of column values, indexed by column title
        for line in reader:
            if len(line) != title_count:
                raise IOError #FIXME make this nicer
            for i, value in enumerate(line):
                attribute_dic.setdefault(titles_stripped[i],[]).append(value)

        return attribute_dic, title_index_dic

    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' %column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column_name column, using the
        known_values, to return a value of the value_column_name column.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) != value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' %column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name,'wb')
        writer = csv.writer(fd)

        #Write the title row to the csv file
        line = [None]* len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]] = title
        writer.writerow(line)

        # Write the values to the csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]* len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]] = \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)
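
# Sketch of typical Exposure_csv usage (the file and column names here are
# hypothetical):
#
#     exposure = Exposure_csv('exposure.csv')
#     depths = exposure.get_column('MAX DEPTH')        # list of strings
#     depths = [float(x) for x in depths]
#     exposure.set_column('MAX DEPTH x2', [str(2*d) for d in depths])
#     exposure.save('exposure_out.csv')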


#Auxiliary
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format).
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()
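
# Minimal example (illustrative): write two disconnected flat triangles.
#
#     from Numeric import array, Float
#     x = array([[0.,1.,0.], [1.,1.,0.]], Float)
#     y = array([[0.,0.,1.], [0.,1.,1.]], Float)
#     z = array([[0.,0.,0.], [0.,0.,0.]], Float)
#     write_obj('two_triangles', x, y, z)   # creates two_triangles.obj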


#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(data_dir+os.sep+filename,'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Hmmmm'



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]


        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step=1):
    """Read NetCDF file filename1, pick timesteps first:step:last and save to
    NetCDF file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
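
# For example, to thin a large output file down to every 10th timestep
# (illustrative file names):
#
#     filter_netcdf('run1.sww', 'run1_coarse.sww', first=0, step=10)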


#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

#FIXME move into geospatial.  There should only be one method that
# reads xya, and writes pts.
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get xya file
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)
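
# For example, keeping every second point and flipping the sign of the third
# column (illustrative base name):
#
#     xya2pts('bathymetry', stride=2,
#             attribute_name='elevation',
#             z_func=lambda z: -z)     # writes bathymetry.pts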


######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name=None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from anuga.coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result
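
# Clipping to a region of interest while converting (illustrative values):
#
#     dem2pts('topo',
#             easting_min=722000, easting_max=725000,
#             northing_min=5893000, northing_max=5896000,
#             use_cache=True, verbose=True)   # writes topo.pts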


def _dem2pts(basename_in, basename_out=None, verbose=False,
             easting_min=None, easting_max=None,
             northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                          'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose and nn > 0:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])
1585
1586
1587
1588def hecras_cross_sections2pts(basename_in,
1589                              basename_out=None,
1590                              verbose=False):
1591    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1592
1593    Example:
1594
1595
1596# RAS export file created on Mon 15Aug2005 11:42
1597# by HEC-RAS Version 3.1.1
1598
1599BEGIN HEADER:
1600  UNITS: METRIC
1601  DTM TYPE: TIN
1602  DTM: v:\1\cit\perth_topo\river_tin
1603  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1604  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1605  MAP PROJECTION: UTM
1606  PROJECTION ZONE: 50
1607  DATUM: AGD66
1608  VERTICAL DATUM:
1609  NUMBER OF REACHES:  19
1610  NUMBER OF CROSS-SECTIONS:  14206
1611END HEADER:
1612
1613
1614Only the SURFACE LINE data of the following form will be utilised
1615
1616  CROSS-SECTION:
1617    STREAM ID:Southern-Wungong
1618    REACH ID:Southern-Wungong
1619    STATION:19040.*
1620    CUT LINE:
1621      405548.671603161 , 6438142.7594925
1622      405734.536092045 , 6438326.10404912
1623      405745.130459356 , 6438331.48627354
1624      405813.89633823 , 6438368.6272789
1625    SURFACE LINE:
1626     405548.67,   6438142.76,   35.37
1627     405552.24,   6438146.28,   35.41
1628     405554.78,   6438148.78,   35.44
1629     405555.80,   6438149.79,   35.44
1630     405559.37,   6438153.31,   35.45
1631     405560.88,   6438154.81,   35.44
1632     405562.93,   6438156.83,   35.42
1633     405566.50,   6438160.35,   35.38
1634     405566.99,   6438160.83,   35.37
1635     ...
1636   END CROSS-SECTION
1637
1638    Convert to NetCDF pts format which is
1639
1640    points:  (Nx2) Float array
1641    elevation: N Float array
1642    """
1643
1644    #FIXME: Can this be written feasibly using write_pts?
1645
1646    import os
1647    from Scientific.IO.NetCDF import NetCDFFile
1648    from Numeric import Float, zeros, reshape
1649
1650    root = basename_in
1651
1652    #Get ASCII file
1653    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1654
1655    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1656
1657    lines = infile.readlines()
1658    infile.close()
1659
1660    if verbose: print 'Converting to pts format'
1661
1662    i = 0
1663    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1664        i += 1
1665
1666    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1667    i += 1
1668
1669    assert lines[i].strip().upper().startswith('UNITS:')
1670    units = lines[i].strip().split()[1]
1671    i += 1
1672
1673    assert lines[i].strip().upper().startswith('DTM TYPE:')
1674    i += 1
1675
1676    assert lines[i].strip().upper().startswith('DTM:')
1677    i += 1
1678
1679    assert lines[i].strip().upper().startswith('STREAM')
1680    i += 1
1681
1682    assert lines[i].strip().upper().startswith('CROSS')
1683    i += 1
1684
1685    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1686    projection = lines[i].strip().split(':')[1]
1687    i += 1
1688
1689    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1690    zone = int(lines[i].strip().split(':')[1])
1691    i += 1
1692
1693    assert lines[i].strip().upper().startswith('DATUM:')
1694    datum = lines[i].strip().split(':')[1]
1695    i += 1
1696
1697    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1698    i += 1
1699
1700    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1701    i += 1
1702
1703    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1704    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1705    i += 1
1706
1707
1708    #Now read all points
1709    points = []
1710    elevation = []
1711    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1712        for k, entry in enumerate(entries):
1713            points.append(entry[:2])
1714            elevation.append(entry[2])
1715
1716
    msg = 'Actual number of cross sections %d does not match reported number %d'\
          %(j+1, number_of_cross_sections)
1719    assert j+1 == number_of_cross_sections, msg
1720
1721    #Get output file
1722    if basename_out == None:
1723        ptsname = root + '.pts'
1724    else:
1725        ptsname = basename_out + '.pts'
1726
1727    #FIXME (DSG-ON): use loadASCII export_points_file
1728    if verbose: print 'Store to NetCDF file %s' %ptsname
1729    # NetCDF file definition
1730    outfile = NetCDFFile(ptsname, 'w')
1731
1732    #Create new file
1733    outfile.institution = 'Geoscience Australia'
1734    outfile.description = 'NetCDF pts format for compact and portable ' +\
1735                          'storage of spatial point data derived from HEC-RAS'
1736
1737    #Georeferencing
1738    outfile.zone = zone
1739    outfile.xllcorner = 0.0
1740    outfile.yllcorner = 0.0
1741    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1742    outfile.false_northing = 1000000
1743
1744    outfile.projection = projection
1745    outfile.datum = datum
1746    outfile.units = units
1747
1748
1749    outfile.createDimension('number_of_points', len(points))
1750    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1751
1752    # variable definitions
1753    outfile.createVariable('points', Float, ('number_of_points',
1754                                             'number_of_dimensions'))
1755    outfile.createVariable('elevation', Float, ('number_of_points',))
1756
1757    # Get handles to the variables
1758    outfile.variables['points'][:] = points
1759    outfile.variables['elevation'][:] = elevation
1760
1761    outfile.close()
1762
1763
1764
1765def sww2dem(basename_in, basename_out = None,
1766            quantity = None,
1767            timestep = None,
1768            reduction = None,
1769            cellsize = 10,
1770            NODATA_value = -9999,
1771            easting_min = None,
1772            easting_max = None,
1773            northing_min = None,
1774            northing_max = None,
1775            verbose = False,
1776            origin = None,
1777            datum = 'WGS84',
            format = 'ers'):
1779
1780    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1781
1782    Example (ASC):
1783
1784    ncols         3121
1785    nrows         1800
1786    xllcorner     722000
1787    yllcorner     5893000
1788    cellsize      25
1789    NODATA_value  -9999
1790    138.3698 137.4194 136.5062 135.5558 ..........
1791
    Also write accompanying file with same basename_out but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.
1794
1795    The prj format is assumed to be as
1796
1797    Projection    UTM
1798    Zone          56
1799    Datum         WGS84
1800    Zunits        NO
1801    Units         METERS
1802    Spheroid      WGS84
1803    Xshift        0.0000000000
1804    Yshift        10000000.0000000000
1805    Parameters
1806
1807
1808    The parameter quantity must be the name of an existing quantity or
1809    an expression involving existing quantities. The default is
1810    'elevation'.
1811
1812    if timestep (an index) is given, output quantity at that timestep
1813
1814    if reduction is given use that to reduce quantity over all timesteps.
1815
    datum specifies the geodetic datum recorded in the output georeferencing (default 'WGS84')
1817
1818    format can be either 'asc' or 'ers'
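
    Example of use (an illustrative sketch; 'myrun' is a hypothetical
    basename referring to an existing myrun.sww file):

        sww2dem('myrun', quantity='stage', cellsize=25,
                reduction=max, format='asc', verbose=True)
        # Writes myrun_stage.asc and myrun_stage.prj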
1819    """
1820
1821    import sys
1822    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1823    from Numeric import array2string
1824
1825    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1826    from anuga.pyvolution.util import apply_expression_to_dictionary
1827
1828    msg = 'Format must be either asc or ers'
1829    assert format.lower() in ['asc', 'ers'], msg
1830
1831
1832    false_easting = 500000
1833    false_northing = 10000000
1834
1835    if quantity is None:
1836        quantity = 'elevation'
1837
1838    if reduction is None:
1839        reduction = max
1840
1841    if basename_out is None:
1842        basename_out = basename_in + '_%s' %quantity
1843
1844    swwfile = basename_in + '.sww'
1845    demfile = basename_out + '.' + format
    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option)
1848
1849    #Read sww file
1850    if verbose: print 'Reading from %s' %swwfile
1851    from Scientific.IO.NetCDF import NetCDFFile
1852    fid = NetCDFFile(swwfile)
1853
1854    #Get extent and reference
1855    x = fid.variables['x'][:]
1856    y = fid.variables['y'][:]
1857    volumes = fid.variables['volumes'][:]
1858
1859    number_of_timesteps = fid.dimensions['number_of_timesteps']
1860    number_of_points = fid.dimensions['number_of_points']
1861    if origin is None:
1862
1863        #Get geo_reference
1864        #sww files don't have to have a geo_ref
1865        try:
1866            geo_reference = Geo_reference(NetCDFObject=fid)
1867        except AttributeError, e:
1868            geo_reference = Geo_reference() #Default georef object
1869
1870        xllcorner = geo_reference.get_xllcorner()
1871        yllcorner = geo_reference.get_yllcorner()
1872        zone = geo_reference.get_zone()
1873    else:
1874        zone = origin[0]
1875        xllcorner = origin[1]
1876        yllcorner = origin[2]
1877
1878
1879
1880    #FIXME: Refactor using code from file_function.statistics
1881    #Something like print swwstats(swwname)
1882    if verbose:
1883        x = fid.variables['x'][:]
1884        y = fid.variables['y'][:]
1885        times = fid.variables['time'][:]
1886        print '------------------------------------------------'
1887        print 'Statistics of SWW file:'
1888        print '  Name: %s' %swwfile
1889        print '  Reference:'
1890        print '    Lower left corner: [%f, %f]'\
1891              %(xllcorner, yllcorner)
1892        print '    Start time: %f' %fid.starttime[0]
1893        print '  Extent:'
1894        print '    x [m] in [%f, %f], len(x) == %d'\
1895              %(min(x.flat), max(x.flat), len(x.flat))
1896        print '    y [m] in [%f, %f], len(y) == %d'\
1897              %(min(y.flat), max(y.flat), len(y.flat))
1898        print '    t [s] in [%f, %f], len(t) == %d'\
1899              %(min(times), max(times), len(times))
1900        print '  Quantities [SI units]:'
1901        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
1902            q = fid.variables[name][:].flat
1903            print '    %s in [%f, %f]' %(name, min(q), max(q))
1904
1905
1906
1907
1908
1909    # Get quantity and reduce if applicable
1910    if verbose: print 'Processing quantity %s' %quantity
1911
1912    # Turn NetCDF objects into Numeric arrays
1913    quantity_dict = {}
1914    for name in fid.variables.keys():
1915        quantity_dict[name] = fid.variables[name][:]
1916
1917
1918
1919    # Convert quantity expression to quantities found in sww file   
1920    q = apply_expression_to_dictionary(quantity, quantity_dict)
1921
1922
1923
    if len(q.shape) == 2:
        #q has a time component: use the given timestep (an index) if
        #specified, otherwise reduce along the temporal dimension
        if timestep is not None:
            q = q[timestep, :]
        else:
            if verbose: print 'Reducing quantity %s' %quantity
            q_reduced = zeros( number_of_points, Float )

            for k in range(number_of_points):
                q_reduced[k] = reduction( q[:,k] )

            q = q_reduced
1934
1935    #Post condition: Now q has dimension: number_of_points
1936    assert len(q.shape) == 1
1937    assert q.shape[0] == number_of_points
1938
1939
1940    if verbose:
1941        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1942
1943
1944    #Create grid and update xll/yll corner and x,y
1945
1946    #Relative extent
1947    if easting_min is None:
1948        xmin = min(x)
1949    else:
1950        xmin = easting_min - xllcorner
1951
1952    if easting_max is None:
1953        xmax = max(x)
1954    else:
1955        xmax = easting_max - xllcorner
1956
1957    if northing_min is None:
1958        ymin = min(y)
1959    else:
1960        ymin = northing_min - yllcorner
1961
1962    if northing_max is None:
1963        ymax = max(y)
1964    else:
1965        ymax = northing_max - yllcorner
1966
1967
1968
1969    if verbose: print 'Creating grid'
1970    ncols = int((xmax-xmin)/cellsize)+1
1971    nrows = int((ymax-ymin)/cellsize)+1
1972
1973
1974    #New absolute reference and coordinates
1975    newxllcorner = xmin+xllcorner
1976    newyllcorner = ymin+yllcorner
1977
1978    x = x+xllcorner-newxllcorner
1979    y = y+yllcorner-newyllcorner
1980
1981    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1982    assert len(vertex_points.shape) == 2
1983
1984
1985
1986    grid_points = zeros ( (ncols*nrows, 2), Float )
1987
1988
1989    for i in xrange(nrows):
1990        if format.lower() == 'asc':
1991            yg = i*cellsize
        else:
            #This will flip the order of the y values for ers
            yg = (nrows-i)*cellsize
1995
1996        for j in xrange(ncols):
1997            xg = j*cellsize
1998            k = i*ncols + j
1999
2000            grid_points[k,0] = xg
2001            grid_points[k,1] = yg
2002
2003    #Interpolate
2004    #from least_squares import Interpolation
2005    from anuga.fit_interpolate.interpolate import Interpolate
2006
2007
2008    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2009
2010
2011
2012    #Interpolate using quantity values
2013    if verbose: print 'Interpolating'
2014    grid_values = interp.interpolate(q, grid_points).flat
2015
2016
2017    if verbose:
2018        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2019                                                      max(grid_values))
2020
2021    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2022    P = interp.mesh.get_boundary_polygon()
2023    outside_indices = outside_polygon(grid_points, P, closed=True)
2024
2025    for i in outside_indices:
2026        grid_values[i] = NODATA_value
2027
2028
2029
2030
2031    if format.lower() == 'ers':
2032        # setup ERS header information
2033        grid_values = reshape(grid_values,(nrows, ncols))
2034        header = {}
2035        header['datum'] = '"' + datum + '"'
2036        # FIXME The use of hardwired UTM and zone number needs to be made optional
2037        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2038        header['projection'] = '"UTM-' + str(zone) + '"'
2039        header['coordinatetype'] = 'EN'
2040        if header['coordinatetype'] == 'LL':
2041            header['longitude'] = str(newxllcorner)
2042            header['latitude'] = str(newyllcorner)
2043        elif header['coordinatetype'] == 'EN':
2044            header['eastings'] = str(newxllcorner)
2045            header['northings'] = str(newyllcorner)
2046        header['nullcellvalue'] = str(NODATA_value)
2047        header['xdimension'] = str(cellsize)
2048        header['ydimension'] = str(cellsize)
2049        header['value'] = '"' + quantity + '"'
2050        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2051
2052
2053        #Write
2054        if verbose: print 'Writing %s' %demfile
2055        import ermapper_grids
2056        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2057
2058        fid.close()
2059    else:
2060        #Write to Ascii format
2061
2062        #Write prj file
2063        prjfile = basename_out + '.prj'
2064
2065        if verbose: print 'Writing %s' %prjfile
2066        prjid = open(prjfile, 'w')
2067        prjid.write('Projection    %s\n' %'UTM')
2068        prjid.write('Zone          %d\n' %zone)
2069        prjid.write('Datum         %s\n' %datum)
2070        prjid.write('Zunits        NO\n')
2071        prjid.write('Units         METERS\n')
2072        prjid.write('Spheroid      %s\n' %datum)
2073        prjid.write('Xshift        %d\n' %false_easting)
2074        prjid.write('Yshift        %d\n' %false_northing)
2075        prjid.write('Parameters\n')
2076        prjid.close()
2077
2078
2079
2080        if verbose: print 'Writing %s' %demfile
2081
2082        ascid = open(demfile, 'w')
2083
2084        ascid.write('ncols         %d\n' %ncols)
2085        ascid.write('nrows         %d\n' %nrows)
2086        ascid.write('xllcorner     %d\n' %newxllcorner)
2087        ascid.write('yllcorner     %d\n' %newyllcorner)
2088        ascid.write('cellsize      %f\n' %cellsize)
2089        ascid.write('NODATA_value  %d\n' %NODATA_value)
2090
2091
2092        #Get bounding polygon from mesh
2093        #P = interp.mesh.get_boundary_polygon()
2094        #inside_indices = inside_polygon(grid_points, P)
2095
2096        for i in range(nrows):
2097            if verbose and i%((nrows+10)/10)==0:
2098                print 'Doing row %d of %d' %(i, nrows)
2099
2100            base_index = (nrows-i-1)*ncols
2101
2102            slice = grid_values[base_index:base_index+ncols]
2103            s = array2string(slice, max_line_width=sys.maxint)
2104            ascid.write(s[1:-1] + '\n')
2105
2106
2107            #print
2108            #for j in range(ncols):
2109            #    index = base_index+j#
2110            #    print grid_values[index],
2111            #    ascid.write('%f ' %grid_values[index])
2112            #ascid.write('\n')
2113
2114        #Close
2115        ascid.close()
2116        fid.close()
2117
2118#Backwards compatibility
2119def sww2asc(basename_in, basename_out = None,
2120            quantity = None,
2121            timestep = None,
2122            reduction = None,
2123            cellsize = 10,
2124            verbose = False,
2125            origin = None):
    print 'sww2asc will soon be obsolete - please use sww2dem'
2127    sww2dem(basename_in,
2128            basename_out = basename_out,
2129            quantity = quantity,
2130            timestep = timestep,
2131            reduction = reduction,
2132            cellsize = cellsize,
2133            verbose = verbose,
2134            origin = origin,
            datum = 'WGS84',
            format = 'asc')
2137
2138def sww2ers(basename_in, basename_out = None,
2139            quantity = None,
2140            timestep = None,
2141            reduction = None,
2142            cellsize = 10,
2143            verbose = False,
2144            origin = None,
2145            datum = 'WGS84'):
    print 'sww2ers will soon be obsolete - please use sww2dem'
2147    sww2dem(basename_in,
2148            basename_out = basename_out,
2149            quantity = quantity,
2150            timestep = timestep,
2151            reduction = reduction,
2152            cellsize = cellsize,
2153            verbose = verbose,
2154            origin = origin,
2155            datum = datum,
2156            format = 'ers')
2157################################# END COMPATIBILITY ##############
2158
2159
2160
2161def sww2pts(basename_in, basename_out=None,
2162            data_points=None,
2163            quantity=None,
2164            timestep=None,
2165            reduction=None,
2166            NODATA_value=-9999,
2167            verbose=False,
2168            origin=None):
2169            #datum = 'WGS84')
2170
2171
2172    """Read SWW file and convert to interpolated values at selected points
2173
    The parameter 'quantity' must be the name of an existing quantity or
2175    an expression involving existing quantities. The default is
2176    'elevation'.
2177
2178    if timestep (an index) is given, output quantity at that timestep
2179
2180    if reduction is given use that to reduce quantity over all timesteps.
2181
    data_points (Nx2 array) gives the locations of points where quantity is to be computed.
2183   
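    Example of use (an illustrative sketch; the basename and points are
    hypothetical):

        from Numeric import array
        points = array([[380000.0, 6450000.0], [380500.0, 6450500.0]])
        sww2pts('myrun', data_points=points, quantity='elevation')
        # Writes interpolated elevations at the two points to
        # myrun_elevation.pts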
2184    """
2185
2186    import sys
2187    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2188    from Numeric import array2string
2189
2190    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2191    from anuga.pyvolution.util import apply_expression_to_dictionary
2192
2193    from anuga.geospatial_data.geospatial_data import Geospatial_data
2194
2195    if quantity is None:
2196        quantity = 'elevation'
2197
2198    if reduction is None:
2199        reduction = max
2200
2201    if basename_out is None:
2202        basename_out = basename_in + '_%s' %quantity
2203
2204    swwfile = basename_in + '.sww'
2205    ptsfile = basename_out + '.pts'
2206
2207    # Read sww file
2208    if verbose: print 'Reading from %s' %swwfile
2209    from Scientific.IO.NetCDF import NetCDFFile
2210    fid = NetCDFFile(swwfile)
2211
2212    # Get extent and reference
2213    x = fid.variables['x'][:]
2214    y = fid.variables['y'][:]
2215    volumes = fid.variables['volumes'][:]
2216
2217    number_of_timesteps = fid.dimensions['number_of_timesteps']
2218    number_of_points = fid.dimensions['number_of_points']
2219    if origin is None:
2220
2221        # Get geo_reference
2222        # sww files don't have to have a geo_ref
2223        try:
2224            geo_reference = Geo_reference(NetCDFObject=fid)
2225        except AttributeError, e:
2226            geo_reference = Geo_reference() #Default georef object
2227
2228        xllcorner = geo_reference.get_xllcorner()
2229        yllcorner = geo_reference.get_yllcorner()
2230        zone = geo_reference.get_zone()
2231    else:
2232        zone = origin[0]
2233        xllcorner = origin[1]
2234        yllcorner = origin[2]
2235
2236
2237
2238    # FIXME: Refactor using code from file_function.statistics
2239    # Something like print swwstats(swwname)
2240    if verbose:
2241        x = fid.variables['x'][:]
2242        y = fid.variables['y'][:]
2243        times = fid.variables['time'][:]
2244        print '------------------------------------------------'
2245        print 'Statistics of SWW file:'
2246        print '  Name: %s' %swwfile
2247        print '  Reference:'
2248        print '    Lower left corner: [%f, %f]'\
2249              %(xllcorner, yllcorner)
2250        print '    Start time: %f' %fid.starttime[0]
2251        print '  Extent:'
2252        print '    x [m] in [%f, %f], len(x) == %d'\
2253              %(min(x.flat), max(x.flat), len(x.flat))
2254        print '    y [m] in [%f, %f], len(y) == %d'\
2255              %(min(y.flat), max(y.flat), len(y.flat))
2256        print '    t [s] in [%f, %f], len(t) == %d'\
2257              %(min(times), max(times), len(times))
2258        print '  Quantities [SI units]:'
2259        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2260            q = fid.variables[name][:].flat
2261            print '    %s in [%f, %f]' %(name, min(q), max(q))
2262
2263
2264
2265    # Get quantity and reduce if applicable
2266    if verbose: print 'Processing quantity %s' %quantity
2267
2268    # Turn NetCDF objects into Numeric arrays
2269    quantity_dict = {}
2270    for name in fid.variables.keys():
2271        quantity_dict[name] = fid.variables[name][:]
2272
2273
2274
2275    # Convert quantity expression to quantities found in sww file   
2276    q = apply_expression_to_dictionary(quantity, quantity_dict)
2277
2278
2279
    if len(q.shape) == 2:
        # q has a time component: use the given timestep (an index) if
        # specified, otherwise reduce along the temporal dimension
        if timestep is not None:
            q = q[timestep, :]
        else:
            if verbose: print 'Reducing quantity %s' %quantity
            q_reduced = zeros( number_of_points, Float )

            for k in range(number_of_points):
                q_reduced[k] = reduction( q[:,k] )

            q = q_reduced
2290
2291    # Post condition: Now q has dimension: number_of_points
2292    assert len(q.shape) == 1
2293    assert q.shape[0] == number_of_points
2294
2295
2296    if verbose:
2297        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2298
2299
2300    # Create grid and update xll/yll corner and x,y
2301    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2302    assert len(vertex_points.shape) == 2
2303
2304    # Interpolate
2305    from anuga.fit_interpolate.interpolate import Interpolate
2306    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2307
2308    # Interpolate using quantity values
2309    if verbose: print 'Interpolating'
2310    interpolated_values = interp.interpolate(q, data_points).flat
2311
2312
2313    if verbose:
2314        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2315                                                      max(interpolated_values))
2316
2317    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2318    P = interp.mesh.get_boundary_polygon()
2319    outside_indices = outside_polygon(data_points, P, closed=True)
2320
2321    for i in outside_indices:
2322        interpolated_values[i] = NODATA_value
2323
2324
2325    # Store results   
2326    G = Geospatial_data(data_points=data_points,
2327                        attributes=interpolated_values)
2328
2329    G.export_points_file(ptsfile, absolute = True)
2330
2331    fid.close()
2332
2333
2334def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2335                                  use_cache = False,
2336                                  verbose = False):
2337    """Read Digitial Elevation model from the following ASCII format (.asc)
2338
2339    Example:
2340
2341    ncols         3121
2342    nrows         1800
2343    xllcorner     722000
2344    yllcorner     5893000
2345    cellsize      25
2346    NODATA_value  -9999
2347    138.3698 137.4194 136.5062 135.5558 ..........
2348
2349    Convert basename_in + '.asc' to NetCDF format (.dem)
2350    mimicking the ASCII format closely.
2351
2352
2353    An accompanying file with same basename_in but extension .prj must exist
2354    and is used to fix the UTM zone, datum, false northings and eastings.
2355
2356    The prj format is assumed to be as
2357
2358    Projection    UTM
2359    Zone          56
2360    Datum         WGS84
2361    Zunits        NO
2362    Units         METERS
2363    Spheroid      WGS84
2364    Xshift        0.0000000000
2365    Yshift        10000000.0000000000
2366    Parameters
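
    Example of use (an illustrative sketch; 'mydem' is a hypothetical
    basename with existing mydem.asc and mydem.prj files):

        convert_dem_from_ascii2netcdf('mydem', use_cache=True, verbose=True)
        # Writes mydem.dem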
2367    """
2368
2369
2370
2371    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2372
2373    if use_cache is True:
2374        from caching import cache
2375        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2376                       dependencies = [basename_in + '.asc'],
2377                       verbose = verbose)
2378
2379    else:
2380        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2381
2382    return result
2383
2384
2385
2386
2387
2388
2389def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2390                                  verbose = False):
2391    """Read Digitial Elevation model from the following ASCII format (.asc)
2392
2393    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2394    """
2395
2396    import os
2397    from Scientific.IO.NetCDF import NetCDFFile
2398    from Numeric import Float, array
2399
2400    #root, ext = os.path.splitext(basename_in)
2401    root = basename_in
2402
2403    ###########################################
2404    # Read Meta data
2405    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2406    metadatafile = open(root + '.prj')
2407    metalines = metadatafile.readlines()
2408    metadatafile.close()
2409
2410    L = metalines[0].strip().split()
2411    assert L[0].strip().lower() == 'projection'
2412    projection = L[1].strip()                   #TEXT
2413
2414    L = metalines[1].strip().split()
2415    assert L[0].strip().lower() == 'zone'
2416    zone = int(L[1].strip())
2417
2418    L = metalines[2].strip().split()
2419    assert L[0].strip().lower() == 'datum'
2420    datum = L[1].strip()                        #TEXT
2421
2422    L = metalines[3].strip().split()
2423    assert L[0].strip().lower() == 'zunits'     #IGNORE
2424    zunits = L[1].strip()                       #TEXT
2425
2426    L = metalines[4].strip().split()
2427    assert L[0].strip().lower() == 'units'
2428    units = L[1].strip()                        #TEXT
2429
2430    L = metalines[5].strip().split()
2431    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2432    spheroid = L[1].strip()                     #TEXT
2433
2434    L = metalines[6].strip().split()
2435    assert L[0].strip().lower() == 'xshift'
2436    false_easting = float(L[1].strip())
2437
2438    L = metalines[7].strip().split()
2439    assert L[0].strip().lower() == 'yshift'
2440    false_northing = float(L[1].strip())
2441
2442    #print false_easting, false_northing, zone, datum
2443
2444
2445    ###########################################
2446    #Read DEM data
2447
2448    datafile = open(basename_in + '.asc')
2449
2450    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2451    lines = datafile.readlines()
2452    datafile.close()
2453
2454    if verbose: print 'Got', len(lines), ' lines'
2455
2456    ncols = int(lines[0].split()[1].strip())
2457    nrows = int(lines[1].split()[1].strip())
2458    xllcorner = float(lines[2].split()[1].strip())
2459    yllcorner = float(lines[3].split()[1].strip())
2460    cellsize = float(lines[4].split()[1].strip())
2461    NODATA_value = int(lines[5].split()[1].strip())
2462
2463    assert len(lines) == nrows + 6
2464
2465
2466    ##########################################
2467
2468
2469    if basename_out == None:
2470        netcdfname = root + '.dem'
2471    else:
2472        netcdfname = basename_out + '.dem'
2473
2474    if verbose: print 'Store to NetCDF file %s' %netcdfname
2475    # NetCDF file definition
2476    fid = NetCDFFile(netcdfname, 'w')
2477
2478    #Create new file
2479    fid.institution = 'Geoscience Australia'
2480    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2481                      'of spatial point data'
2482
2483    fid.ncols = ncols
2484    fid.nrows = nrows
2485    fid.xllcorner = xllcorner
2486    fid.yllcorner = yllcorner
2487    fid.cellsize = cellsize
2488    fid.NODATA_value = NODATA_value
2489
2490    fid.zone = zone
2491    fid.false_easting = false_easting
2492    fid.false_northing = false_northing
2493    fid.projection = projection
2494    fid.datum = datum
2495    fid.units = units
2496
2497
2498    # dimension definitions
2499    fid.createDimension('number_of_rows', nrows)
2500    fid.createDimension('number_of_columns', ncols)
2501
2502    # variable definitions
2503    fid.createVariable('elevation', Float, ('number_of_rows',
2504                                            'number_of_columns'))
2505
2506    # Get handles to the variables
2507    elevation = fid.variables['elevation']
2508
2509    #Store data
2510    n = len(lines[6:])
2511    for i, line in enumerate(lines[6:]):
2512        fields = line.split()
2513        if verbose and i%((n+10)/10)==0:
2514            print 'Processing row %d of %d' %(i, nrows)
2515
2516        elevation[i, :] = array([float(x) for x in fields])
2517
2518    fid.close()
2519
2520
2521
2522
2523
2524def ferret2sww(basename_in, basename_out = None,
2525               verbose = False,
2526               minlat = None, maxlat = None,
2527               minlon = None, maxlon = None,
2528               mint = None, maxt = None, mean_stage = 0,
2529               origin = None, zscale = 1,
2530               fail_on_NaN = True,
2531               NaN_filler = 0,
2532               elevation = None,
2533               inverted_bathymetry = False
2534               ): #FIXME: Bathymetry should be obtained
2535                                  #from MOST somehow.
2536                                  #Alternatively from elsewhere
2537                                  #or, as a last resort,
2538                                  #specified here.
2539                                  #The value of -100 will work
2540                                  #for the Wollongong tsunami
2541                                  #scenario but is very hacky
2542    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2543    sww format native to pyvolution.
2544
2545    Specify only basename_in and read files of the form
2546    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2547    relative height, x-velocity and y-velocity, respectively.
2548
2549    Also convert latitude and longitude to UTM. All coordinates are
2550    assumed to be given in the GDA94 datum.
2551
    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees
2555
2556    origin is a 3-tuple with geo referenced
2557    UTM coordinates (zone, easting, northing)
2558
2559    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2560    which means that longitude is the fastest
2561    varying dimension (row major order, so to speak)
2562
2563    ferret2sww uses grid points as vertices in a triangular grid
2564    counting vertices from lower left corner upwards, then right
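
    Example of use (an illustrative sketch; 'mymost' is a hypothetical
    basename with existing mymost_ha.nc, mymost_ua.nc, mymost_va.nc
    and mymost_e.nc files; the lat/lon window is made up):

        ferret2sww('mymost', verbose=True,
                   minlat=-35.0, maxlat=-34.0,
                   minlon=150.0, maxlon=151.0,
                   mean_stage=0, zscale=1)
        # Writes mymost.sww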
2565    """
2566
2567    import os
2568    from Scientific.IO.NetCDF import NetCDFFile
2569    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2570    from Numeric import allclose, around
2571
2572    precision = Float
2573
2574    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2575
2576    if minlat != None:
2577        assert -90 < minlat < 90 , msg
2578    if maxlat != None:
2579        assert -90 < maxlat < 90 , msg
2580    if minlon != None:
2581        assert -180 < minlon < 180 , msg
2582    if maxlon != None:
2583        assert -180 < maxlon < 180 , msg
2584
2585
2586    #Get NetCDF data
2587    if verbose: print 'Reading files %s_*.nc' %basename_in
2588    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2589    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2590    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2591    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2592
2593    if basename_out is None:
2594        swwname = basename_in + '.sww'
2595    else:
2596        swwname = basename_out + '.sww'
2597
    # Get dimensions of file_h
2599    for dimension in file_h.dimensions.keys():
2600        if dimension[:3] == 'LON':
2601            dim_h_longitude = dimension
2602        if dimension[:3] == 'LAT':
2603            dim_h_latitude = dimension
2604        if dimension[:4] == 'TIME':
2605            dim_h_time = dimension
2606
2607#    print 'long:', dim_h_longitude
2608#    print 'lats:', dim_h_latitude
2609#    print 'times:', dim_h_time
2610
2611    times = file_h.variables[dim_h_time]
2612    latitudes = file_h.variables[dim_h_latitude]
2613    longitudes = file_h.variables[dim_h_longitude]
2614   
    # Get dimensions of file_e
2616    for dimension in file_e.dimensions.keys():
2617        if dimension[:3] == 'LON':
2618            dim_e_longitude = dimension
2619        if dimension[:3] == 'LAT':
2620            dim_e_latitude = dimension
2621        if dimension[:4] == 'TIME':
2622            dim_e_time = dimension
2623
    # Get dimensions of file_u
2625    for dimension in file_u.dimensions.keys():
2626        if dimension[:3] == 'LON':
2627            dim_u_longitude = dimension
2628        if dimension[:3] == 'LAT':
2629            dim_u_latitude = dimension
2630        if dimension[:4] == 'TIME':
2631            dim_u_time = dimension
2632           
    # Get dimensions of file_v
2634    for dimension in file_v.dimensions.keys():
2635        if dimension[:3] == 'LON':
2636            dim_v_longitude = dimension
2637        if dimension[:3] == 'LAT':
2638            dim_v_latitude = dimension
2639        if dimension[:4] == 'TIME':
2640            dim_v_time = dimension
2641
2642
2643    #Precision used by most for lat/lon is 4 or 5 decimals
2644    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2645    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2646
2647    #Check that files are compatible
2648    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2649    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2650    assert allclose(latitudes, e_lat)
2651
2652    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2653    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2654    assert allclose(longitudes, e_lon)
2655
2656    if mint == None:
2657        jmin = 0
2658    else:
2659        jmin = searchsorted(times, mint)
2660
2661    if maxt == None:
2662        jmax=len(times)
2663    else:
2664        jmax = searchsorted(times, maxt)
2665
2666    if minlat == None:
2667        kmin=0
2668    else:
2669        kmin = searchsorted(latitudes, minlat)
2670
2671    if maxlat == None:
2672        kmax = len(latitudes)
2673    else:
2674        kmax = searchsorted(latitudes, maxlat)
2675
2676    if minlon == None:
2677        lmin=0
2678    else:
2679        lmin = searchsorted(longitudes, minlon)
2680
2681    if maxlon == None:
2682        lmax = len(longitudes)
2683    else:
2684        lmax = searchsorted(longitudes, maxlon)
2685
2686#    print' j', jmin, jmax
2687    times = times[jmin:jmax]
2688    latitudes = latitudes[kmin:kmax]
2689    longitudes = longitudes[lmin:lmax]
2690
2691
2692    if verbose: print 'cropping'
2693    zname = 'ELEVATION'
2694
2695    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2696    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2697    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2698    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2699
2700    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2701    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2702    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2703    #        from Numeric import asarray
2704    #        elevations=elevations.tolist()
2705    #        elevations.reverse()
2706    #        elevations=asarray(elevations)
2707    #    else:
2708    #        from Numeric import asarray
2709    #        elevations=elevations.tolist()
2710    #        elevations.reverse()
2711    #        elevations=asarray(elevations)
2712    #        'print hmmm'
2713
2714
2715
2716    #Get missing values
2717    nan_ha = file_h.variables['HA'].missing_value[0]
2718    nan_ua = file_u.variables['UA'].missing_value[0]
2719    nan_va = file_v.variables['VA'].missing_value[0]
2720    if hasattr(file_e.variables[zname],'missing_value'):
2721        nan_e  = file_e.variables[zname].missing_value[0]
2722    else:
2723        nan_e = None
2724
2725    #Cleanup
2726    from Numeric import sometrue
2727
2728    missing = (amplitudes == nan_ha)
2729    if sometrue (missing):
2730        if fail_on_NaN:
2731            msg = 'NetCDFFile %s contains missing values'\
2732                  %(basename_in+'_ha.nc')
2733            raise DataMissingValuesError, msg
2734        else:
2735            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2736
2737    missing = (uspeed == nan_ua)
2738    if sometrue (missing):
2739        if fail_on_NaN:
2740            msg = 'NetCDFFile %s contains missing values'\
2741                  %(basename_in+'_ua.nc')
2742            raise DataMissingValuesError, msg
2743        else:
2744            uspeed = uspeed*(missing==0) + missing*NaN_filler
2745
2746    missing = (vspeed == nan_va)
2747    if sometrue (missing):
2748        if fail_on_NaN:
2749            msg = 'NetCDFFile %s contains missing values'\
2750                  %(basename_in+'_va.nc')
2751            raise DataMissingValuesError, msg
2752        else:
2753            vspeed = vspeed*(missing==0) + missing*NaN_filler
2754
2755
2756    missing = (elevations == nan_e)
2757    if sometrue (missing):
2758        if fail_on_NaN:
2759            msg = 'NetCDFFile %s contains missing values'\
2760                  %(basename_in+'_e.nc')
2761            raise DataMissingValuesError, msg
2762        else:
2763            elevations = elevations*(missing==0) + missing*NaN_filler
2764
2765    #######
2766
2767
2768
2769    number_of_times = times.shape[0]
2770    number_of_latitudes = latitudes.shape[0]
2771    number_of_longitudes = longitudes.shape[0]
2772
2773    assert amplitudes.shape[0] == number_of_times
2774    assert amplitudes.shape[1] == number_of_latitudes
2775    assert amplitudes.shape[2] == number_of_longitudes
2776
2777    if verbose:
2778        print '------------------------------------------------'
2779        print 'Statistics:'
2780        print '  Extent (lat/lon):'
2781        print '    lat in [%f, %f], len(lat) == %d'\
2782              %(min(latitudes.flat), max(latitudes.flat),
2783                len(latitudes.flat))
2784        print '    lon in [%f, %f], len(lon) == %d'\
2785              %(min(longitudes.flat), max(longitudes.flat),
2786                len(longitudes.flat))
2787        print '    t in [%f, %f], len(t) == %d'\
2788              %(min(times.flat), max(times.flat), len(times.flat))
2789
        q = amplitudes.flat
        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = uspeed.flat
        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = vspeed.flat
        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = elevations.flat
        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2805
2806
2807    #print number_of_latitudes, number_of_longitudes
2808    number_of_points = number_of_latitudes*number_of_longitudes
2809    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2810
2811
2812    file_h.close()
2813    file_u.close()
2814    file_v.close()
2815    file_e.close()
2816
2817
2818    # NetCDF file definition
2819    outfile = NetCDFFile(swwname, 'w')
2820
2821    #Create new file
2822    outfile.institution = 'Geoscience Australia'
2823    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2824                          %(basename_in + '_ha.nc',
2825                            basename_in + '_ua.nc',
2826                            basename_in + '_va.nc',
2827                            basename_in + '_e.nc')
2828
2829
2830    #For sww compatibility
2831    outfile.smoothing = 'Yes'
2832    outfile.order = 1
2833
2834    #Start time in seconds since the epoch (midnight 1/1/1970)
2835    outfile.starttime = starttime = times[0]
2836    times = times - starttime  #Store relative times
2837
2838    # dimension definitions
2839    outfile.createDimension('number_of_volumes', number_of_volumes)
2840
2841    outfile.createDimension('number_of_vertices', 3)
2842    outfile.createDimension('number_of_points', number_of_points)
2843
2844
2846    outfile.createDimension('number_of_timesteps', len(times))
2847
2848    # variable definitions
2849    outfile.createVariable('x', precision, ('number_of_points',))
2850    outfile.createVariable('y', precision, ('number_of_points',))
2851    outfile.createVariable('elevation', precision, ('number_of_points',))
2852
2853    #FIXME: Backwards compatibility
2854    outfile.createVariable('z', precision, ('number_of_points',))
2855    #################################
2856
2857    outfile.createVariable('volumes', Int, ('number_of_volumes',
2858                                            'number_of_vertices'))
2859
2860    outfile.createVariable('time', precision,
2861                           ('number_of_timesteps',))
2862
2863    outfile.createVariable('stage', precision,
2864                           ('number_of_timesteps',
2865                            'number_of_points'))
2866
2867    outfile.createVariable('xmomentum', precision,
2868                           ('number_of_timesteps',
2869                            'number_of_points'))
2870
2871    outfile.createVariable('ymomentum', precision,
2872                           ('number_of_timesteps',
2873                            'number_of_points'))
2874
2875
2876    #Store
2877    from anuga.coordinate_transforms.redfearn import redfearn
2878    x = zeros(number_of_points, Float)  #Easting
2879    y = zeros(number_of_points, Float)  #Northing
2880
2881
2882    if verbose: print 'Making triangular grid'
2883    #Check zone boundaries
2884    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2885
2886    vertices = {}
2887    i = 0
2888    for k, lat in enumerate(latitudes):       #Y direction
2889        for l, lon in enumerate(longitudes):  #X direction
2890
2891            vertices[l,k] = i
2892
2893            zone, easting, northing = redfearn(lat,lon)
2894
            msg = 'Zone boundary crossed at longitude = %f' %lon
2896            #assert zone == refzone, msg
2897            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2898            x[i] = easting
2899            y[i] = northing
2900            i += 1
2901
2902
2903    #Construct 2 triangles per 'rectangular' element
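    #Each rectangular cell is split along the diagonal joining its
    #lower-left (v2) and upper-right (v3) corners:
    #
    #   v1 ---- v3
    #   |     /  |
    #   |   /    |
    #   v2 ---- v4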
2904    volumes = []
2905    for l in range(number_of_longitudes-1):    #X direction
2906        for k in range(number_of_latitudes-1): #Y direction
2907            v1 = vertices[l,k+1]
2908            v2 = vertices[l,k]
2909            v3 = vertices[l+1,k+1]
2910            v4 = vertices[l+1,k]
2911
2912            volumes.append([v1,v2,v3]) #Upper element
2913            volumes.append([v4,v3,v2]) #Lower element
2914
2915    volumes = array(volumes)
2916
2917    if origin == None:
2918        zone = refzone
2919        xllcorner = min(x)
2920        yllcorner = min(y)
2921    else:
2922        zone = origin[0]
2923        xllcorner = origin[1]
2924        yllcorner = origin[2]
2925
2926
2927    outfile.xllcorner = xllcorner
2928    outfile.yllcorner = yllcorner
2929    outfile.zone = zone
2930
2931
2932    if elevation is not None:
2933        z = elevation
2934    else:
2935        if inverted_bathymetry:
2936            z = -1*elevations
2937        else:
2938            z = elevations
2939    #FIXME: z should be obtained from MOST and passed in here
2940
2941    from Numeric import resize
2942    z = resize(z,outfile.variables['z'][:].shape)
2943    outfile.variables['x'][:] = x - xllcorner
2944    outfile.variables['y'][:] = y - yllcorner
2945    outfile.variables['z'][:] = z             #FIXME HACK
2946    outfile.variables['elevation'][:] = z
2947    outfile.variables['time'][:] = times   #Store time relative
2948    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2949
2950
2951
2952    #Time stepping
2953    stage = outfile.variables['stage']
2954    xmomentum = outfile.variables['xmomentum']
2955    ymomentum = outfile.variables['ymomentum']
2956
2957    if verbose: print 'Converting quantities'
2958    n = len(times)
2959    for j in range(n):
2960        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2961        i = 0
2962        for k in range(number_of_latitudes):      #Y direction
2963            for l in range(number_of_longitudes): #X direction
2964                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2965                stage[j,i] = w
2966                h = w - z[i]
2967                xmomentum[j,i] = uspeed[j,k,l]/100*h
2968                ymomentum[j,i] = vspeed[j,k,l]/100*h
2969                i += 1
2970
2971    #outfile.close()
2972
2973    #FIXME: Refactor using code from file_function.statistics
2974    #Something like print swwstats(swwname)
2975    if verbose:
2976        x = outfile.variables['x'][:]
2977        y = outfile.variables['y'][:]
2978        times = outfile.variables['time'][:]
2979        print '------------------------------------------------'
2980        print 'Statistics of output file:'
2981        print '  Name: %s' %swwname
2982        print '  Reference:'
2983        print '    Lower left corner: [%f, %f]'\
2984              %(xllcorner, yllcorner)
2985        print '    Start time: %f' %starttime
2986        print '  Extent:'
2987        print '    x [m] in [%f, %f], len(x) == %d'\
2988              %(min(x.flat), max(x.flat), len(x.flat))
2989        print '    y [m] in [%f, %f], len(y) == %d'\
2990              %(min(y.flat), max(y.flat), len(y.flat))
2991        print '    t [s] in [%f, %f], len(t) == %d'\
2992              %(min(times), max(times), len(times))
2993        print '  Quantities [SI units]:'
2994        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2995            q = outfile.variables[name][:].flat
2996            print '    %s in [%f, %f]' %(name, min(q), max(q))
2997
2998
2999
3000    outfile.close()
3001
3002
3003
3004
3005
3006def timefile2netcdf(filename, quantity_names = None):
3007    """Template for converting typical text files with time series to
3008    NetCDF tms file.
3009
3010
    The file format is assumed to be two fields separated by a comma:
3012
3013        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3014
    E.g.
3016
3017      31/08/04 00:00:00, 1.328223 0 0
3018      31/08/04 00:15:00, 1.292912 0 0
3019
3020    will provide a time dependent function f(t) with three attributes
3021
    filename is assumed to be the rootname: input is read from filename + '.txt' and output is written to filename + '.tms'
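
    Example of use (an illustrative sketch; 'mygauge' is a hypothetical
    rootname for an existing mygauge.txt in the above format):

        timefile2netcdf('mygauge', quantity_names=['stage', 'xmom', 'ymom'])
        # Writes mygauge.tms with variables stage, xmom and ymom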
3023    """
3024
3025    import time, calendar
3026    from Numeric import array
3027    from anuga.config import time_format
3028    from anuga.utilities.numerical_tools import ensure_numeric
3029
3030
3031
3032    fid = open(filename + '.txt')
3033    line = fid.readline()
3034    fid.close()
3035
3036    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3038    assert len(fields) == 2, msg
3039
3040    try:
3041        starttime = calendar.timegm(time.strptime(fields[0], time_format))
3042    except ValueError:
3043        msg = 'First field in file %s must be' %filename
3044        msg += ' date-time with format %s.\n' %time_format
3045        msg += 'I got %s instead.' %fields[0]
3046        raise DataTimeError, msg
3047
3048
3049    #Split values
3050    values = []
3051    for value in fields[1].split():
3052        values.append(float(value))
3053
3054    q = ensure_numeric(values)
3055
3056    msg = 'ERROR: File must contain at least one independent value'
3057    assert len(q.shape) == 1, msg
3058
3059
3060
3061    #Read times proper
3062    from Numeric import zeros, Float, alltrue
3063    from anuga.config import time_format
3064    import time, calendar
3065
3066    fid = open(filename + '.txt')
3067    lines = fid.readlines()
3068    fid.close()
3069
3070    N = len(lines)
3071    d = len(q)
3072
3073    T = zeros(N, Float)       #Time
3074    Q = zeros((N, d), Float)  #Values
3075
3076    for i, line in enumerate(lines):
3077        fields = line.split(',')
3078        realtime = calendar.timegm(time.strptime(fields[0], time_format))
3079
3080        T[i] = realtime - starttime
3081
3082        for j, value in enumerate(fields[1].split()):
3083            Q[i, j] = float(value)
3084
    msg = 'File %s must list time as a monotonically ' %filename
3086    msg += 'increasing sequence'
3087    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3088
3089
3090    #Create NetCDF file
3091    from Scientific.IO.NetCDF import NetCDFFile
3092
3093    fid = NetCDFFile(filename + '.tms', 'w')
3094
3095
3096    fid.institution = 'Geoscience Australia'
3097    fid.description = 'Time series'
3098
3099
3100    #Reference point
3101    #Start time in seconds since the epoch (midnight 1/1/1970)
3102    #FIXME: Use Georef
3103    fid.starttime = starttime
3104
3105
3106    # dimension definitions
3107    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3108    #fid.createDimension('number_of_vertices', 3)
3109
3110
3111    fid.createDimension('number_of_timesteps', len(T))
3112
3113    fid.createVariable('time', Float, ('number_of_timesteps',))
3114
3115    fid.variables['time'][:] = T
3116
3117    for i in range(Q.shape[1]):
3118        try:
3119            name = quantity_names[i]
3120        except:
3121            name = 'Attribute%d'%i
3122
3123        fid.createVariable(name, Float, ('number_of_timesteps',))
3124        fid.variables[name][:] = Q[:,i]
3125
3126    fid.close()
3127
3128
3129def extent_sww(file_name):
3130    """
3131    Read in an sww file.
3132
    Input:
    file_name - the sww file

    Output:
    A list of the form
    [min(x), max(x), min(y), max(y), min(stage), max(stage)]
    giving the spatial extent of the mesh and the range of stage
    values over all vertices and timesteps
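
    Example of use (an illustrative sketch; 'myrun.sww' is hypothetical):

        xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('myrun.sww')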
3142    """
3143
3144
3145    from Scientific.IO.NetCDF import NetCDFFile
3146
3147    #Check contents
3148    #Get NetCDF
3149    fid = NetCDFFile(file_name, 'r')
3150
3151    # Get the variables
3152    x = fid.variables['x'][:]
3153    y = fid.variables['y'][:]
3154    stage = fid.variables['stage'][:]
3155    #print "stage",stage
3156    #print "stage.shap",stage.shape
3157    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3158    #print "min(stage)",min(stage)
3159
3160    fid.close()
3161
3162    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3163
3164
def sww2domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
3168    """
3169    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3170
3171    Boundary is not recommended if domain.smooth is not selected, as it
3172    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
3174    give a different final boundary, or crash.
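
    Example of use (an illustrative sketch; 'myrun.sww' is hypothetical):

        domain = sww2domain('myrun.sww', t=3600.0, verbose=True)
        # Returns a shallow water Domain with quantities interpolated
        # to 3600 s after the start of the stored time series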
3175    """
3176    NaN=9.969209968386869e+036
3177    #initialise NaN.
3178
3179    from Scientific.IO.NetCDF import NetCDFFile
3180    from shallow_water import Domain
3181    from Numeric import asarray, transpose, resize
3182
3183    if verbose: print 'Reading from ', filename
3184    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3185    time = fid.variables['time']       #Timesteps
3186    if t is None:
3187        t = time[-1]
3188    time_interp = get_time_interp(time,t)
3189
3190    # Get the variables as Numeric arrays
3191    x = fid.variables['x'][:]             #x-coordinates of vertices
3192    y = fid.variables['y'][:]             #y-coordinates of vertices
3193    elevation = fid.variables['elevation']     #Elevation
3194    stage = fid.variables['stage']     #Water level
3195    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3196    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3197
3198    starttime = fid.starttime[0]
3199    volumes = fid.variables['volumes'][:] #Connectivity
3200    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3201
3202    conserved_quantities = []
3203    interpolated_quantities = {}
3204    other_quantities = []
3205
3206    # get geo_reference
3207    #sww files don't have to have a geo_ref
3208    try:
3209        geo_reference = Geo_reference(NetCDFObject=fid)
3210    except: #AttributeError, e:
3211        geo_reference = None
3212
3213    if verbose: print '    getting quantities'
3214    for quantity in fid.variables.keys():
3215        dimensions = fid.variables[quantity].dimensions
3216        if 'number_of_timesteps' in dimensions:
3217            conserved_quantities.append(quantity)
3218            interpolated_quantities[quantity]=\
3219                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3220        else: other_quantities.append(quantity)
3221
3222    other_quantities.remove('x')
3223    other_quantities.remove('y')
3224    other_quantities.remove('z')
3225    other_quantities.remove('volumes')
3226
3227    conserved_quantities.remove('time')
3228
3229    if verbose: print '    building domain'
3230    #    From domain.Domain:
3231    #    domain = Domain(coordinates, volumes,\
3232    #                    conserved_quantities = conserved_quantities,\
3233    #                    other_quantities = other_quantities,zone=zone,\
3234    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3235
3236    #   From shallow_water.Domain:
3237    coordinates=coordinates.tolist()
3238    volumes=volumes.tolist()
3239    #FIXME:should this be in mesh?(peter row)
3240    if fid.smoothing == 'Yes': unique = False
3241    else: unique = True
3242    if unique:
3243        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3244
3245
3246    try:
3247        domain = Domain(coordinates, volumes, boundary)
3248    except AssertionError, e:
3249        fid.close()
3250        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3251        raise DataDomainError, msg
3252
3253    if not boundary is None:
3254        domain.boundary = boundary
3255
3256    domain.geo_reference = geo_reference
3257
3258    domain.starttime=float(starttime)+float(t)
3259    domain.time=0.0
3260
3261    for quantity in other_quantities:
3262        try:
3263            NaN = fid.variables[quantity].missing_value
3264        except:
3265            pass #quantity has no missing_value number
3266        X = fid.variables[quantity][:]
3267        if very_verbose:
3268            print '       ',quantity
3269            print '        NaN =',NaN
3270            print '        max(X)'
3271            print '       ',max(X)
3272            print '        max(X)==NaN'
3273            print '       ',max(X)==NaN
3274            print ''
3275        if (max(X)==NaN) or (min(X)==NaN):
3276            if fail_if_NaN:
3277                msg = 'quantity "%s" contains no_data entry'%quantity
3278                raise DataMissingValuesError, msg
3279            else:
3280                data = (X<>NaN)
3281                X = (X*data)+(data==0)*NaN_filler
3282        if unique:
3283            X = resize(X,(len(X)/3,3))
3284        domain.set_quantity(quantity,X)
3285    #
3286    for quantity in conserved_quantities:
3287        try:
3288            NaN = fid.variables[quantity].missing_value
3289        except:
3290            pass #quantity has no missing_value number
3291        X = interpolated_quantities[quantity]
3292        if very_verbose:
3293            print '       ',quantity
3294            print '        NaN =',NaN
3295            print '        max(X)'
3296            print '       ',max(X)
3297            print '        max(X)==NaN'
3298            print '       ',max(X)==NaN
3299            print ''
3300        if (max(X)==NaN) or (min(X)==NaN):
3301            if fail_if_NaN:
3302                msg = 'quantity "%s" contains no_data entry'%quantity
3303                raise DataMissingValuesError, msg
3304            else:
3305                data = (X<>NaN)
3306                X = (X*data)+(data==0)*NaN_filler
3307        if unique:
3308            X = resize(X,(X.shape[0]/3,3))
3309        domain.set_quantity(quantity,X)
3310
3311    fid.close()
3312    return domain
3313
3314def interpolated_quantity(saved_quantity,time_interp):
3315
3316    #given an index and ratio, interpolate quantity with respect to time.
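    #For example, time_interp == (2, 0.25) yields
    #0.75*Q[2] + 0.25*Q[3], i.e. linear interpolation a quarter of the
    #way between timesteps 2 and 3.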
3317    index,ratio = time_interp
3318    Q = saved_quantity
3319    if ratio > 0:
3320        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3321    else:
3322        q = Q[index]
3323    #Return vector of interpolated values
3324    return q
3325
3326def get_time_interp(time,t=None):
3327    #Finds the ratio and index for time interpolation.
3328    #It is borrowed from previous pyvolution code.
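    #For example, time == [0, 10, 20] and t == 12.5 yields (1, 0.25),
    #i.e. a quarter of the way from time[1] to time[2].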
3329    if t is None:
3330        t=time[-1]
3331        index = -1
3332        ratio = 0.
3333    else:
3334        T = time
3335        tau = t
3336        index=0
3337        msg = 'Time interval derived from file %s [%s:%s]'\
3338            %('FIXMEfilename', T[0], T[-1])
3339        msg += ' does not match model time: %s' %tau
3340        if tau < time[0]: raise DataTimeError, msg
3341        if tau > time[-1]: raise DataTimeError, msg
3342        while tau > time[index]: index += 1
3343        while tau < time[index]: index -= 1
3344        if tau == time[index]:
3345            #Protect against case where tau == time[-1] (last time)
3346            # - also works in general when tau == time[i]
3347            ratio = 0
3348        else:
3349            #t is now between index and index+1
3350            ratio = (tau - time[index])/(time[index+1] - time[index])
3351    return (index,ratio)
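
# A minimal sketch of how the two helpers above combine (illustrative only;
# Q and the time values are made up):
#
#     from Numeric import array
#     Q = array([[0.0, 0.0], [10.0, 20.0]])       #quantity at two timesteps
#     index, ratio = get_time_interp([0.0, 60.0], t=30.0)  # -> (0, 0.5)
#     q = interpolated_quantity(Q, (index, ratio))         # -> [5.0, 10.0]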


def weed(coordinates, volumes, boundary=None):
    #Note: type(x) == 'array' is never true, so convert Numeric arrays
    #by checking the type name instead
    if type(coordinates).__name__ == 'array':
        coordinates = coordinates.tolist()
    if type(volumes).__name__ == 'array':
        volumes = volumes.tolist()

    unique = False
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point_dict.has_key(point):
            unique = True
            same_point[i] = point
            #to change all point i references to point j
        else:
            point_dict[point] = i
            same_point[i] = point

    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1


    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    new_boundary = {}
    if not boundary is None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME: should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')? (peter row)

            if new_boundary.has_key((point0, point1)):
                new_boundary[(point0, point1)] = new_boundary[(point0, point1)]#\
                                                 #+','+label

            elif new_boundary.has_key((point1, point0)):
                new_boundary[(point1, point0)] = new_boundary[(point1, point0)]#\
                                                 #+','+label
            else: new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
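
# A minimal sketch of weed() (illustrative): two triangles listing their
# shared vertices twice; after weeding only four unique points remain and
# both triangles index into them.
#
#     coords = [[0,0], [1,0], [0,1], [1,0], [0,1], [1,1]]
#     vols = [[0,1,2], [3,4,5]]
#     coords, vols, _ = weed(coords, vols)
#     # len(coords) == 4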


def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digital Elevation Model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out is None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                          'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev = zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
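
# A minimal sketch of decimate_dem (illustrative; 'topo.dem' is an assumed
# input with cellsize 25):
#
#     from Numeric import ones, Float
#     stencil = ones((2,2), Float)/4.0     #simple 2x2 block average
#     decimate_dem('topo', stencil, 50)    #writes 'topo_50.dem'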


def tsh2sww(filename, verbose=False): #test_tsh2sww
    """
    Convert a tsh/msh file to an sww file, e.g. to check that a mesh
    'looks' good.
    """

    from shallow_water import Domain
    from anuga.pyvolution.pmesh2domain import pmesh_to_domain_instance
    import time, os
    from anuga.pyvolution.data_manager import get_dataobject
    from os import sep, path
    from anuga.utilities.numerical_tools import mean

    if verbose: print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose: print "Number of triangles = ", len(domain)

    domain.smooth = True
    domain.format = 'sww'   #Native netcdf visualisation format
    file_path, filename = path.split(filename)
    filename, ext = path.splitext(filename)
    domain.filename = filename
    domain.reduction = mean
    if verbose: print "file_path", file_path
    if file_path == "": file_path = "."
    domain.set_datadir(file_path)

    if verbose:
        print "Output written to " + domain.get_datadir() + sep + \
              domain.filename + "." + domain.format
    sww = get_dataobject(domain)
    sww.store_connectivity()
    sww.store_timestep('stage')
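
# Illustrative usage (assuming 'channel.tsh' exists in the current directory):
#     tsh2sww('channel.tsh', verbose=True)   #writes channel.sww alongside it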


def asc_csiro2sww(bath_dir,
                  elevation_dir,
                  ucur_dir,
                  vcur_dir,
                  sww_file,
                  minlat = None, maxlat = None,
                  minlon = None, maxlon = None,
                  zscale=1,
                  mean_stage = 0,
                  fail_on_NaN = True,
                  elevation_NaN_filler = 0,
                  bath_prefix='ba',
                  elevation_prefix='el',
                  verbose=False):
    """
    Produce an sww boundary file from ESRI ascii data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    Assume:
    All files are in ESRI ascii format.

    4 types of information:
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions:
    The metadata of all the files is the same.
    Each type is in a separate directory.
    One bath file with extension .000.
    The time period is less than 24hrs and uniform.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from anuga.coordinate_transforms.redfearn import redfearn

    precision = Float # So if we want to change the precision it's done here

    # go into the bath dir and load the only file
    bath_files = os.listdir(bath_dir)

    #FIXME: make more general?
    bath_file = bath_files[0]
    bath_dir_file = bath_dir + os.sep + bath_file
    bath_metadata, bath_grid = _read_asc(bath_dir_file)

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    base_start = bath_file[-12:]

    #go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir + os.sep + elevation_prefix \
                         + base_start
    #elevation_metadata, elevation_grid = _read_asc(elevation_dir_file)

    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)

    # the first elevation file should be the
    # file with the same base name as the bath data
    assert elevation_files[0] == 'el' + base_start

    #assert bath_metadata == elevation_metadata



    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
                  for x in range(number_of_longitudes)]
    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
                 for y in range(number_of_latitudes)]

    # reverse order of lat, so the first lat represents the first grid row
    latitudes.reverse()

    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes, longitudes,
                                                  minlat=minlat, maxlat=maxlat,
                                                  minlon=minlon, maxlon=maxlon)


    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    #Work out the times
    if len(elevation_files) > 1:
        # Assume: The time period is less than 24hrs.
        time_period = (int(elevation_files[1][-3:]) - \
                       int(elevation_files[0][-3:]))*60*60
        times = [x*time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]

    if verbose:
        print '------------------------------------------------'
        print 'Statistics:'
        print '  Extent (lat/lon):'
        print '    lat in [%f, %f], len(lat) == %d'\
              %(min(latitudes), max(latitudes),
                len(latitudes))
        print '    lon in [%f, %f], len(lon) == %d'\
              %(min(longitudes), max(longitudes),
                len(longitudes))
        print '    t in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))

    ######### WRITE THE SWW FILE #############
    # NetCDF file definition
    outfile = NetCDFFile(sww_file, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'


    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    #Start time in seconds since the epoch (midnight 1/1/1970)
    outfile.starttime = starttime = times[0]


    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)

    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    #Store
    from anuga.coordinate_transforms.redfearn import redfearn
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print 'Making triangular grid'
    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat, lon)

            msg = 'Zone boundary crossed at longitude = %s' %lon
            #assert zone == refzone, msg
            x[i] = easting
            y[i] = northing
            i += 1


    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            #Note, this is different to the ferret2sww code
            #since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) #Upper element
            volumes.append([v4,v2,v3]) #Lower element

    volumes = array(volumes)

    geo_ref = Geo_reference(refzone, min(x), min(y))
    geo_ref.write_NetCDF(outfile)

    # This would put the geo ref in the middle:
    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)


    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (x/y):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y),
                len(y))
        print 'geo_ref: ', geo_ref

    z = resize(bath_grid, outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

    # do this to create an ok sww file.
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z
    # put the next line up in the code after outfile.order = 1
    #number_of_times = 1

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
            _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])

        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]

        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue(missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                raise DataMissingValuesError, msg
            else:
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler


        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()
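
# Illustrative usage (directory and file names are assumptions):
#     asc_csiro2sww('bath', 'elev', 'ucur', 'vcur', 'southern_wave.sww',
#                   minlat=-38, maxlat=-35, minlon=148, maxlon=151,
#                   verbose=True)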

def _get_min_max_indexes(latitudes, longitudes,
                         minlat=None, maxlat=None,
                         minlon=None, maxlon=None):
    """
    Return min, max indexes (for slicing) of the lat and long arrays to cover
    the area specified with min/max lat/long.

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assume latitudes & longitudes are sorted,
    long - from low to high (west to east, eg 148 - 151)
    lat - from high to low (north to south, eg -35 - -38)
    """

    # reverse order of lat, so it's in ascending order
    latitudes.reverse()
    largest_lat_index = len(latitudes)-1
    #Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0


    if maxlat is None:
        lat_max_index = largest_lat_index #len(latitudes)
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)

    #Take into account that the latitude list was reversed
    latitudes.reverse() # Python passes by reference, need to swap back
    lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
                                   largest_lat_index - lat_min_index
    lat_max_index = lat_max_index + 1 # taking into account how slicing works
    lon_max_index = lon_max_index + 1 # taking into account how slicing works

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
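
# A minimal sketch (illustrative):
#     lats = [-35.0, -36.0, -37.0, -38.0]   #north to south
#     lons = [148.0, 149.0, 150.0, 151.0]   #west to east
#     _get_min_max_indexes(lats, lons, minlat=-37.0, maxlat=-36.0,
#                          minlon=149.0, maxlon=150.0)
#     # -> (1, 4, 0, 3), so lats[1:4] and lons[0:3] (with a row/column of
#     # padding) cover the requested area.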


def _read_asc(filename, verbose=False):
    """Read ESRI file in the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    """

    datafile = open(filename)

    if verbose: print 'Reading DEM from %s' %(filename)
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), 'lines'

    ncols = int(lines.pop(0).split()[1].strip())
    nrows = int(lines.pop(0).split()[1].strip())
    xllcorner = float(lines.pop(0).split()[1].strip())
    yllcorner = float(lines.pop(0).split()[1].strip())
    cellsize = float(lines.pop(0).split()[1].strip())
    NODATA_value = float(lines.pop(0).split()[1].strip())

    assert len(lines) == nrows

    #Store data
    grid = []

    n = len(lines)
    for i, line in enumerate(lines):
        cells = line.split()
        assert len(cells) == ncols
        grid.append(array([float(x) for x in cells]))
    grid = array(grid)

    return {'xllcorner':xllcorner,
            'yllcorner':yllcorner,
            'cellsize':cellsize,
            'NODATA_value':NODATA_value}, grid
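
# Illustrative usage (assuming 'dem.asc' exists):
#     metadata, grid = _read_asc('dem.asc')
#     # grid.shape == (nrows, ncols);
#     # metadata keys: xllcorner, yllcorner, cellsize, NODATA_value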


# FIXME (Ole): Is this doing anything at all?
def sww2timeseries(swwfile,
                   gauge_filename,
                   gauge_data_outname,
                   quantity = None,
                   time_min = None,
                   time_max = None,
                   verbose = False):

    """Read SWW file and extract time series for prescribed quantities at
    gauge locations.

    The gauge locations are defined in gauge_filename. This file should be
    in the form: gaugename, easting, northing, and should be stored in a
    .csv or .xya file.

    Time series data at the gauges can be written to file (for example,
    Benfield requested raw data) with the default being no data written.

    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'depth'.

    The user can define a list of quantities. The possibilities are
    the conserved quantities of the shallow water wave equation and other
    quantities which can be derived from those, i.e.
    ['depth', 'xmomentum', 'ymomentum', 'momentum', 'velocity', 'bearing'].

    Momentum is the absolute momentum, sqrt(xmomentum^2 + ymomentum^2).
    Note, units of momentum are m^2/s and depth is m.

    Velocity is absolute momentum divided by depth. (Confirming correct units:
    vel = abs mom / depth = (m^2/s)/m = m/s.)

    Bearing returns the angle of the velocity vector from North.

    If time_min and time_max are given, output is plotted in that time range.
    The default is to plot the entire time range evident in the sww file.

    The exported graphic format is 'png' and will be stored in the same
    directory as the input file.
    """

    if quantity is None: quantity = 'depth'

    # extract gauge locations from gauge file

    # extract all quantities from swwfile (f = file_function)
    #FIXME: f is never assigned below, so this function is only a stub
    if time_min is None: time_min = min(f.get_time())
    if time_max is None: time_max = max(f.get_time())

    # loop through appropriate range of time
    # plot prescribed quantities and export data if requested

    #if gauge_data_outname is None:

    return

#-------------------------------------------------------------
if __name__ == "__main__":
    pass