1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using pyvolution
48
49"""

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import csv
import os

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around


from coordinate_transforms.geo_reference import Geo_reference
from geospatial_data.geospatial_data import Geospatial_data


def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s

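#Illustrative sketch (not part of the original source): what make_filename
#does to a typical label.
def _example_make_filename():
    assert make_filename(' Model output (final) ') == 'Model_output_final'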

def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''): #Need read and write access
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 '+path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return(path)



def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN


def get_files(datadir, filename, format, size):
    """Get all file names with given name, size and format
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)

    #Note: check_dir returns a path with trailing separator and
    #create_filename writes names of the form <filename>_size<size>...
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)
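

#Illustrative sketch (not part of the original source): the naming scheme
#produced by create_filename on a Unix platform, where os.sep is '/'.
def _example_create_filename():
    FN = create_filename('.', 'runup', 'sww', size=400, time=1.5)
    assert FN == './runup_size400_time1.50.sww'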


#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
                                        '''   'w' (write)'''+\
                                        '''   'r' (read)''' +\
                                        '''   'a' (append)'''

        #Create filename
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))


        self.filename = create_filename(domain.get_datadir(),
            domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain


        #FIXME: Should we have a general set_precision function?


#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        minimum_allowed_depth = 0.001
        #minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #change domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

            #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()


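#Illustrative sketch (not part of the original module): how a simulation
#typically drives the sww writer.  A fully initialised Domain object with
#the attributes used above (get_datadir, get_name, smooth, time, ...) is
#assumed.
def _example_store_sww(domain):
    writer = Data_format_sww(domain, mode='w')
    writer.store_connectivity()    #Static mesh and bed elevation
    writer.store_timestep(['stage', 'xmomentum', 'ymomentum'])    #Each step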


#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates of triangles and their connectivity.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        q = fid.variables[name]
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        q[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()




#Function for storing xya output
#FIXME Not done yet for this version
#This is obsolete.  Use geo_spatial_data instead
class Data_format_xya(Data_format):
    """Interface to obsolete ASCII format (.xya) for storing model output
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME - This is the old xya format
    def store_all(self):
        """Specialisation of store_all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momenta) to file
        """
        pass

#### NBED national exposure database

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self,file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.

        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad, the
        info to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.
        """
        self._file_name = file_name
        self._geospatial = None

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
        self._load_exposure_csv(self._file_name)
        try:
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if is_x_y_locations is not False:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                 longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #fixme throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = 'Could not find location columns %s and %s' \
                      %(x_title, y_title)
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result <> 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result <> 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic, \
                    other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result <> 0:
                    return result
            return 0
        else:
            return 1

    def _load_exposure_csv(self, file_name):
        """
        Load in the csv as a dic, title as key and column info as value.
        Also, create a dic, title as key and column index as value.
        """

        attribute_dic = {}
        title_index_dic = {}
        titles_stripped = [] # list of titles

        reader = csv.reader(file(file_name))

        # Read in and manipulate the title info
        titles = reader.next()
        for i, title in enumerate(titles):
            titles_stripped.append(title.strip())
            title_index_dic[title.strip()] = i
        title_count = len(titles_stripped)
        #print "title_index_dic",title_index_dic


        #create a dic of column values, indexed by column title
        for line in reader:
            if len(line) <> title_count:
                raise IOError #FIXME make this nicer
            for i, value in enumerate(line):
                attribute_dic.setdefault(titles_stripped[i],[]).append(value)

        return attribute_dic, title_index_dic

    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' %column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column_name, using the
        known_values, to return a value of the value_column_name.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) <> value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' %column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name, 'wb')
        writer = csv.writer(fd)

        #Write the title to a csv file
        line = [None]* len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]] = title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]* len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]] = \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

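#Illustrative sketch (not part of the original module): reading and
#extending an exposure csv file.  The file name and column titles are
#hypothetical.
def _example_exposure_csv():
    exposure = Exposure_csv('houses.csv')
    depths = exposure.get_column('MAX DEPTH')   #Values come back as strings
    depths = [float(x) for x in depths]
    exposure.set_column('DOUBLE DEPTH', [2*d for d in depths])
    exposure.save('houses_extended.csv')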

#Auxiliary
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N == 3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))

    outfile.close()
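
#Illustrative sketch (not part of the original module): write a single
#disconnected triangle to triangle.obj.
def _example_write_obj():
    from Numeric import array, Float
    x = array([[0.0, 1.0, 0.0]], Float)   #One element, three vertices
    y = array([[0.0, 0.0, 1.0]], Float)
    z = array([[0.0, 0.0, 0.0]], Float)
    write_obj('triangle', x, y, z)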


#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN, xx, yy, zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN, xx, yy, zz)

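#Illustrative sketch (not part of the original module): convert an sww file
#with 400 volumes to obj.  The input must follow create_filename's naming
#scheme, i.e. ./runup_size400.sww here; the name is hypothetical.
def _example_sww2obj():
    sww2obj('runup', 400)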

def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float, tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry', x, y, z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(filename, 'r').readlines()  #glob returns full paths
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Hmmmm'



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float, tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t, x, y, z)

def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

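#Illustrative sketch (not part of the original module): thin out an sww
#file, keeping every fifth timestep.  File names are hypothetical.
def _example_filter_netcdf():
    filter_netcdf('simulation.sww', 'simulation_coarse.sww', step=5)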

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

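#Illustrative sketch (not part of the original module): for a domain with
#domain.format == 'sww', the eval above resolves to Data_format_sww, so
#the call below is equivalent to Data_format_sww(domain, 'w').
def _example_get_dataobject(domain):
    return get_dataobject(domain, mode='w')
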
#FIXME move into geospatial.  There should only be one method that
# reads xya, and writes pts.
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get ASCII xya file
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)

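#Illustrative sketch (not part of the original module): convert an xya file,
#keeping every second line and rescaling the third field from cm to m via
#z_func.  The file name is hypothetical.
def _example_xya2pts():
    xya2pts('survey', stride=2, z_func=lambda z: z/100.0)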


######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

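#Illustrative sketch (not part of the original module): clip a DEM to a
#bounding box while converting, caching the result so that a rerun with
#the same arguments is fast.  The file name and coordinates are
#hypothetical.
def _example_dem2pts():
    dem2pts('topography',
            easting_min=722000, easting_max=725000,
            northing_min=5893000, northing_max=5895000,
            use_cache=True, verbose=True)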

def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                          'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose and nn > 0:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0, thisi+1, 1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0, index2+1, 1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] <> NODATA_value:
                tpoints[local_index, :] = [x-easting_min, y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()


def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])

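#Illustrative sketch (not part of the original module): the generator above
#yields one list of [easting, northing, elevation] triplets per cross
#section.
def _example_read_hecras_cross_sections(lines):
    for points in _read_hecras_cross_sections(lines):
        print 'Cross section with %d points' %len(points)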
1581
1582
1583def hecras_cross_sections2pts(basename_in,
1584                              basename_out=None,
1585                              verbose=False):
1586    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1587
1588    Example:
1589
1590
1591# RAS export file created on Mon 15Aug2005 11:42
1592# by HEC-RAS Version 3.1.1
1593
1594BEGIN HEADER:
1595  UNITS: METRIC
1596  DTM TYPE: TIN
1597  DTM: v:\1\cit\perth_topo\river_tin
1598  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1599  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1600  MAP PROJECTION: UTM
1601  PROJECTION ZONE: 50
1602  DATUM: AGD66
1603  VERTICAL DATUM:
1604  NUMBER OF REACHES:  19
1605  NUMBER OF CROSS-SECTIONS:  14206
1606END HEADER:
1607
1608
1609Only the SURFACE LINE data of the following form will be utilised
1610
1611  CROSS-SECTION:
1612    STREAM ID:Southern-Wungong
1613    REACH ID:Southern-Wungong
1614    STATION:19040.*
1615    CUT LINE:
1616      405548.671603161 , 6438142.7594925
1617      405734.536092045 , 6438326.10404912
1618      405745.130459356 , 6438331.48627354
1619      405813.89633823 , 6438368.6272789
1620    SURFACE LINE:
1621     405548.67,   6438142.76,   35.37
1622     405552.24,   6438146.28,   35.41
1623     405554.78,   6438148.78,   35.44
1624     405555.80,   6438149.79,   35.44
1625     405559.37,   6438153.31,   35.45
1626     405560.88,   6438154.81,   35.44
1627     405562.93,   6438156.83,   35.42
1628     405566.50,   6438160.35,   35.38
1629     405566.99,   6438160.83,   35.37
1630     ...
1631   END CROSS-SECTION
1632
1633    Convert to NetCDF pts format which is
1634
1635    points:  (Nx2) Float array
1636    elevation: N Float array
1637    """
1638
1639    #FIXME: Can this be written feasibly using write_pts?
1640
1641    import os
1642    from Scientific.IO.NetCDF import NetCDFFile
1643    from Numeric import Float, zeros, reshape
1644
1645    root = basename_in
1646
1647    #Get ASCII file
1648    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1649
1650    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1651
1652    lines = infile.readlines()
1653    infile.close()
1654
1655    if verbose: print 'Converting to pts format'
1656
1657    i = 0
1658    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1659        i += 1
1660
1661    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1662    i += 1
1663
1664    assert lines[i].strip().upper().startswith('UNITS:')
1665    units = lines[i].strip().split()[1]
1666    i += 1
1667
1668    assert lines[i].strip().upper().startswith('DTM TYPE:')
1669    i += 1
1670
1671    assert lines[i].strip().upper().startswith('DTM:')
1672    i += 1
1673
1674    assert lines[i].strip().upper().startswith('STREAM')
1675    i += 1
1676
1677    assert lines[i].strip().upper().startswith('CROSS')
1678    i += 1
1679
1680    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1681    projection = lines[i].strip().split(':')[1]
1682    i += 1
1683
1684    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1685    zone = int(lines[i].strip().split(':')[1])
1686    i += 1
1687
1688    assert lines[i].strip().upper().startswith('DATUM:')
1689    datum = lines[i].strip().split(':')[1]
1690    i += 1
1691
1692    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1693    i += 1
1694
1695    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1696    i += 1
1697
1698    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1699    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1700    i += 1
1701
1702
1703    #Now read all points
1704    points = []
1705    elevation = []
1706    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1707        for k, entry in enumerate(entries):
1708            points.append(entry[:2])
1709            elevation.append(entry[2])
1710
1711
1712    msg = 'Actual number of cross sections == %d, reported as %d'\
1713          %(j+1, number_of_cross_sections)
1714    assert j+1 == number_of_cross_sections, msg
1715
1716    #Get output file
1717    if basename_out == None:
1718        ptsname = root + '.pts'
1719    else:
1720        ptsname = basename_out + '.pts'
1721
1722    #FIXME (DSG-ON): use loadASCII export_points_file
1723    if verbose: print 'Store to NetCDF file %s' %ptsname
1724    # NetCDF file definition
1725    outfile = NetCDFFile(ptsname, 'w')
1726
1727    #Create new file
1728    outfile.institution = 'Geoscience Australia'
1729    outfile.description = 'NetCDF pts format for compact and portable ' +\
1730                          'storage of spatial point data derived from HEC-RAS'
1731
1732    #Georeferencing
1733    outfile.zone = zone
1734    outfile.xllcorner = 0.0
1735    outfile.yllcorner = 0.0
1736    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1737    outfile.false_northing = 1000000
1738
1739    outfile.projection = projection
1740    outfile.datum = datum
1741    outfile.units = units
1742
1743
1744    outfile.createDimension('number_of_points', len(points))
1745    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1746
1747    # variable definitions
1748    outfile.createVariable('points', Float, ('number_of_points',
1749                                             'number_of_dimensions'))
1750    outfile.createVariable('elevation', Float, ('number_of_points',))
1751
1752    # Get handles to the variables
1753    outfile.variables['points'][:] = points
1754    outfile.variables['elevation'][:] = elevation
1755
1756    outfile.close()
1757
1758
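# Example usage (a minimal sketch, assuming the enclosing converter above is
# named hecras_cross_sections2pts and that 'up_canning' is a hypothetical
# basename with up_canning.sdf present):
#
#     hecras_cross_sections2pts('up_canning', verbose=True)
#
#     # Read the resulting pts file back to check its contents:
#     from Scientific.IO.NetCDF import NetCDFFile
#     fid = NetCDFFile('up_canning.pts', 'r')
#     print fid.variables['points'][:2], fid.variables['elevation'][:2]
#     fid.close()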
1759
1760def sww2dem(basename_in, basename_out = None,
1761            quantity = None,
1762            timestep = None,
1763            reduction = None,
1764            cellsize = 10,
1765            NODATA_value = -9999,
1766            easting_min = None,
1767            easting_max = None,
1768            northing_min = None,
1769            northing_max = None,
1770            verbose = False,
1771            origin = None,
1772            datum = 'WGS84',
1773        format = 'ers'):
1774
1775    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1776
1777    Example (ASC):
1778
1779    ncols         3121
1780    nrows         1800
1781    xllcorner     722000
1782    yllcorner     5893000
1783    cellsize      25
1784    NODATA_value  -9999
1785    138.3698 137.4194 136.5062 135.5558 ..........
1786
1787    Also write accompanying file with same basename_out but extension .prj
1788    used to fix the UTM zone, datum, false northings and eastings.
1789
1790    The prj format is assumed to be as
1791
1792    Projection    UTM
1793    Zone          56
1794    Datum         WGS84
1795    Zunits        NO
1796    Units         METERS
1797    Spheroid      WGS84
1798    Xshift        0.0000000000
1799    Yshift        10000000.0000000000
1800    Parameters
1801
1802
1803    The parameter quantity must be the name of an existing quantity or
1804    an expression involving existing quantities. The default is
1805    'elevation'.
1806
1807    if timestep (an index) is given, output quantity at that timestep (note: timestep is currently ignored by this function)
1808
1809    if reduction is given use that to reduce quantity over all timesteps.
1810
1811    datum specifies the horizontal datum used in the output (default 'WGS84')
1812
1813    format can be either 'asc' or 'ers'
1814    """
1815
1816    import sys
1817    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1818    from Numeric import array2string
1819
1820    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1821    from util import apply_expression_to_dictionary
1822
1823    msg = 'Format must be either asc or ers'
1824    assert format.lower() in ['asc', 'ers'], msg
1825
1826
1827    false_easting = 500000
1828    false_northing = 10000000
1829
1830    if quantity is None:
1831        quantity = 'elevation'
1832
1833    if reduction is None:
1834        reduction = max
1835
1836    if basename_out is None:
1837        basename_out = basename_in + '_%s' %quantity
1838
1839    swwfile = basename_in + '.sww'
1840    demfile = basename_out + '.' + format
1841    # Note the use of a .ers extension is optional (write_ermapper_grid will
1842    # deal with either option)
1843
1844    #Read sww file
1845    if verbose: print 'Reading from %s' %swwfile
1846    from Scientific.IO.NetCDF import NetCDFFile
1847    fid = NetCDFFile(swwfile)
1848
1849    #Get extent and reference
1850    x = fid.variables['x'][:]
1851    y = fid.variables['y'][:]
1852    volumes = fid.variables['volumes'][:]
1853
1854    number_of_timesteps = fid.dimensions['number_of_timesteps']
1855    number_of_points = fid.dimensions['number_of_points']
1856    if origin is None:
1857
1858        #Get geo_reference
1859        #sww files don't have to have a geo_ref
1860        try:
1861            geo_reference = Geo_reference(NetCDFObject=fid)
1862        except AttributeError, e:
1863            geo_reference = Geo_reference() #Default georef object
1864
1865        xllcorner = geo_reference.get_xllcorner()
1866        yllcorner = geo_reference.get_yllcorner()
1867        zone = geo_reference.get_zone()
1868    else:
1869        zone = origin[0]
1870        xllcorner = origin[1]
1871        yllcorner = origin[2]
1872
1873
1874
1875    #FIXME: Refactor using code from file_function.statistics
1876    #Something like print swwstats(swwname)
1877    if verbose:
1878        x = fid.variables['x'][:]
1879        y = fid.variables['y'][:]
1880        times = fid.variables['time'][:]
1881        print '------------------------------------------------'
1882        print 'Statistics of SWW file:'
1883        print '  Name: %s' %swwfile
1884        print '  Reference:'
1885        print '    Lower left corner: [%f, %f]'\
1886              %(xllcorner, yllcorner)
1887        print '    Start time: %f' %fid.starttime[0]
1888        print '  Extent:'
1889        print '    x [m] in [%f, %f], len(x) == %d'\
1890              %(min(x.flat), max(x.flat), len(x.flat))
1891        print '    y [m] in [%f, %f], len(y) == %d'\
1892              %(min(y.flat), max(y.flat), len(y.flat))
1893        print '    t [s] in [%f, %f], len(t) == %d'\
1894              %(min(times), max(times), len(times))
1895        print '  Quantities [SI units]:'
1896        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
1897            q = fid.variables[name][:].flat
1898            print '    %s in [%f, %f]' %(name, min(q), max(q))
1899
1900
1901
1902
1903
1904    # Get quantity and reduce if applicable
1905    if verbose: print 'Processing quantity %s' %quantity
1906
1907    # Turn NetCDF objects into Numeric arrays
1908    quantity_dict = {}
1909    for name in fid.variables.keys():
1910        quantity_dict[name] = fid.variables[name][:]
1911
1912
1913
1914    # Convert quantity expression to quantities found in sww file   
1915    q = apply_expression_to_dictionary(quantity, quantity_dict)
1916
1917
1918
1919    if len(q.shape) == 2:
1920        #q has a time component and needs to be reduced along
1921        #the temporal dimension
1922        if verbose: print 'Reducing quantity %s' %quantity
1923        q_reduced = zeros( number_of_points, Float )
1924
1925        for k in range(number_of_points):
1926            q_reduced[k] = reduction( q[:,k] )
1927
1928        q = q_reduced
1929
1930    #Post condition: Now q has dimension: number_of_points
1931    assert len(q.shape) == 1
1932    assert q.shape[0] == number_of_points
1933
1934
1935    if verbose:
1936        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1937
1938
1939    #Create grid and update xll/yll corner and x,y
1940
1941    #Relative extent
1942    if easting_min is None:
1943        xmin = min(x)
1944    else:
1945        xmin = easting_min - xllcorner
1946
1947    if easting_max is None:
1948        xmax = max(x)
1949    else:
1950        xmax = easting_max - xllcorner
1951
1952    if northing_min is None:
1953        ymin = min(y)
1954    else:
1955        ymin = northing_min - yllcorner
1956
1957    if northing_max is None:
1958        ymax = max(y)
1959    else:
1960        ymax = northing_max - yllcorner
1961
1962
1963
1964    if verbose: print 'Creating grid'
1965    ncols = int((xmax-xmin)/cellsize)+1
1966    nrows = int((ymax-ymin)/cellsize)+1
1967
1968
1969    #New absolute reference and coordinates
1970    newxllcorner = xmin+xllcorner
1971    newyllcorner = ymin+yllcorner
1972
1973    x = x+xllcorner-newxllcorner
1974    y = y+yllcorner-newyllcorner
1975
1976    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1977    assert len(vertex_points.shape) == 2
1978
1979
1980
1981    grid_points = zeros ( (ncols*nrows, 2), Float )
1982
1983
1984    for i in xrange(nrows):
1985        if format.lower() == 'asc':
1986            yg = i*cellsize
1987        else:
1988            #this will flip the order of the y values for ers
1989            yg = (nrows-i)*cellsize
1990
1991        for j in xrange(ncols):
1992            xg = j*cellsize
1993            k = i*ncols + j
1994
1995            grid_points[k,0] = xg
1996            grid_points[k,1] = yg
1997
1998    #Interpolate
1999    #from least_squares import Interpolation
2000    from fit_interpolate.interpolate import Interpolate
2001
2002
2003    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2004
2005
2006
2007    #Interpolate using quantity values
2008    if verbose: print 'Interpolating'
2009    grid_values = interp.interpolate(q, grid_points).flat
2010
2011
2012    if verbose:
2013        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2014                                                      max(grid_values))
2015
2016    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2017    P = interp.mesh.get_boundary_polygon()
2018    outside_indices = outside_polygon(grid_points, P, closed=True)
2019
2020    for i in outside_indices:
2021        grid_values[i] = NODATA_value
2022
2023
2024
2025
2026    if format.lower() == 'ers':
2027        # setup ERS header information
2028        grid_values = reshape(grid_values,(nrows, ncols))
2029        header = {}
2030        header['datum'] = '"' + datum + '"'
2031        # FIXME The use of hardwired UTM and zone number needs to be made optional
2032        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2033        header['projection'] = '"UTM-' + str(zone) + '"'
2034        header['coordinatetype'] = 'EN'
2035        if header['coordinatetype'] == 'LL':
2036            header['longitude'] = str(newxllcorner)
2037            header['latitude'] = str(newyllcorner)
2038        elif header['coordinatetype'] == 'EN':
2039            header['eastings'] = str(newxllcorner)
2040            header['northings'] = str(newyllcorner)
2041        header['nullcellvalue'] = str(NODATA_value)
2042        header['xdimension'] = str(cellsize)
2043        header['ydimension'] = str(cellsize)
2044        header['value'] = '"' + quantity + '"'
2045        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2046
2047
2048        #Write
2049        if verbose: print 'Writing %s' %demfile
2050        import ermapper_grids
2051        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2052
2053        fid.close()
2054    else:
2055        #Write to Ascii format
2056
2057        #Write prj file
2058        prjfile = basename_out + '.prj'
2059
2060        if verbose: print 'Writing %s' %prjfile
2061        prjid = open(prjfile, 'w')
2062        prjid.write('Projection    %s\n' %'UTM')
2063        prjid.write('Zone          %d\n' %zone)
2064        prjid.write('Datum         %s\n' %datum)
2065        prjid.write('Zunits        NO\n')
2066        prjid.write('Units         METERS\n')
2067        prjid.write('Spheroid      %s\n' %datum)
2068        prjid.write('Xshift        %d\n' %false_easting)
2069        prjid.write('Yshift        %d\n' %false_northing)
2070        prjid.write('Parameters\n')
2071        prjid.close()
2072
2073
2074
2075        if verbose: print 'Writing %s' %demfile
2076
2077        ascid = open(demfile, 'w')
2078
2079        ascid.write('ncols         %d\n' %ncols)
2080        ascid.write('nrows         %d\n' %nrows)
2081        ascid.write('xllcorner     %d\n' %newxllcorner)
2082        ascid.write('yllcorner     %d\n' %newyllcorner)
2083        ascid.write('cellsize      %f\n' %cellsize)
2084        ascid.write('NODATA_value  %d\n' %NODATA_value)
2085
2086
2087        #Get bounding polygon from mesh
2088        #P = interp.mesh.get_boundary_polygon()
2089        #inside_indices = inside_polygon(grid_points, P)
2090
2091        for i in range(nrows):
2092            if verbose and i%((nrows+10)/10)==0:
2093                print 'Doing row %d of %d' %(i, nrows)
2094
2095            base_index = (nrows-i-1)*ncols
2096
2097            slice = grid_values[base_index:base_index+ncols]
2098            s = array2string(slice, max_line_width=sys.maxint)
2099            ascid.write(s[1:-1] + '\n')
2100
2101
2102            #print
2103            #for j in range(ncols):
2104            #    index = base_index+j#
2105            #    print grid_values[index],
2106            #    ascid.write('%f ' %grid_values[index])
2107            #ascid.write('\n')
2108
2109        #Close
2110        ascid.close()
2111        fid.close()
2112
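# Example usage of sww2dem (a minimal sketch; 'runup' is a hypothetical
# basename with runup.sww present, and the quantity expression may combine
# any quantities stored in the sww file):
#
#     sww2dem('runup', basename_out='runup_depth',
#             quantity='stage - elevation',  # i.e. water depth
#             reduction=max,                 # reduce over all timesteps
#             cellsize=25, format='asc', verbose=True)
#
# This writes runup_depth.asc and the accompanying runup_depth.prj.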
2113#Backwards compatibility
2114def sww2asc(basename_in, basename_out = None,
2115            quantity = None,
2116            timestep = None,
2117            reduction = None,
2118            cellsize = 10,
2119            verbose = False,
2120            origin = None):
2121    print 'sww2asc will soon be obsoleted - please use sww2dem'
2122    sww2dem(basename_in,
2123            basename_out = basename_out,
2124            quantity = quantity,
2125            timestep = timestep,
2126            reduction = reduction,
2127            cellsize = cellsize,
2128            verbose = verbose,
2129            origin = origin,
2130        datum = 'WGS84',
2131        format = 'asc')
2132
2133def sww2ers(basename_in, basename_out = None,
2134            quantity = None,
2135            timestep = None,
2136            reduction = None,
2137            cellsize = 10,
2138            verbose = False,
2139            origin = None,
2140            datum = 'WGS84'):
2141    print 'sww2ers will soon be obsoleted - please use sww2dem'
2142    sww2dem(basename_in,
2143            basename_out = basename_out,
2144            quantity = quantity,
2145            timestep = timestep,
2146            reduction = reduction,
2147            cellsize = cellsize,
2148            verbose = verbose,
2149            origin = origin,
2150            datum = datum,
2151            format = 'ers')
2152################################# END COMPATIBILITY ##############
2153
2154
2155
2156def sww2pts(basename_in, basename_out=None,
2157            data_points=None,
2158            quantity=None,
2159            timestep=None,
2160            reduction=None,
2161            NODATA_value=-9999,
2162            verbose=False,
2163            origin=None):
2164            #datum = 'WGS84')
2165
2166
2167    """Read SWW file and convert to interpolated values at selected points
2168
2169    The parameter 'quantity' must be the name of an existing quantity or
2170    an expression involving existing quantities. The default is
2171    'elevation'.
2172
2173    if timestep (an index) is given, output quantity at that timestep (note: timestep is currently ignored by this function)
2174
2175    if reduction is given use that to reduce quantity over all timesteps.
2176
2177    data_points (Nx2 array) gives locations of points where quantity is to be computed.
2178   
2179    """
2180
2181    import sys
2182    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2183    from Numeric import array2string
2184
2185    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2186    from util import apply_expression_to_dictionary
2187
2188    from geospatial_data import Geospatial_data
2189
2190    if quantity is None:
2191        quantity = 'elevation'
2192
2193    if reduction is None:
2194        reduction = max
2195
2196    if basename_out is None:
2197        basename_out = basename_in + '_%s' %quantity
2198
2199    swwfile = basename_in + '.sww'
2200    ptsfile = basename_out + '.pts'
2201
2202    # Read sww file
2203    if verbose: print 'Reading from %s' %swwfile
2204    from Scientific.IO.NetCDF import NetCDFFile
2205    fid = NetCDFFile(swwfile)
2206
2207    # Get extent and reference
2208    x = fid.variables['x'][:]
2209    y = fid.variables['y'][:]
2210    volumes = fid.variables['volumes'][:]
2211
2212    number_of_timesteps = fid.dimensions['number_of_timesteps']
2213    number_of_points = fid.dimensions['number_of_points']
2214    if origin is None:
2215
2216        # Get geo_reference
2217        # sww files don't have to have a geo_ref
2218        try:
2219            geo_reference = Geo_reference(NetCDFObject=fid)
2220        except AttributeError, e:
2221            geo_reference = Geo_reference() #Default georef object
2222
2223        xllcorner = geo_reference.get_xllcorner()
2224        yllcorner = geo_reference.get_yllcorner()
2225        zone = geo_reference.get_zone()
2226    else:
2227        zone = origin[0]
2228        xllcorner = origin[1]
2229        yllcorner = origin[2]
2230
2231
2232
2233    # FIXME: Refactor using code from file_function.statistics
2234    # Something like print swwstats(swwname)
2235    if verbose:
2236        x = fid.variables['x'][:]
2237        y = fid.variables['y'][:]
2238        times = fid.variables['time'][:]
2239        print '------------------------------------------------'
2240        print 'Statistics of SWW file:'
2241        print '  Name: %s' %swwfile
2242        print '  Reference:'
2243        print '    Lower left corner: [%f, %f]'\
2244              %(xllcorner, yllcorner)
2245        print '    Start time: %f' %fid.starttime[0]
2246        print '  Extent:'
2247        print '    x [m] in [%f, %f], len(x) == %d'\
2248              %(min(x.flat), max(x.flat), len(x.flat))
2249        print '    y [m] in [%f, %f], len(y) == %d'\
2250              %(min(y.flat), max(y.flat), len(y.flat))
2251        print '    t [s] in [%f, %f], len(t) == %d'\
2252              %(min(times), max(times), len(times))
2253        print '  Quantities [SI units]:'
2254        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2255            q = fid.variables[name][:].flat
2256            print '    %s in [%f, %f]' %(name, min(q), max(q))
2257
2258
2259
2260    # Get quantity and reduce if applicable
2261    if verbose: print 'Processing quantity %s' %quantity
2262
2263    # Turn NetCDF objects into Numeric arrays
2264    quantity_dict = {}
2265    for name in fid.variables.keys():
2266        quantity_dict[name] = fid.variables[name][:]
2267
2268
2269
2270    # Convert quantity expression to quantities found in sww file   
2271    q = apply_expression_to_dictionary(quantity, quantity_dict)
2272
2273
2274
2275    if len(q.shape) == 2:
2276        # q has a time component and needs to be reduced along
2277        # the temporal dimension
2278        if verbose: print 'Reducing quantity %s' %quantity
2279        q_reduced = zeros( number_of_points, Float )
2280
2281        for k in range(number_of_points):
2282            q_reduced[k] = reduction( q[:,k] )
2283
2284        q = q_reduced
2285
2286    # Post condition: Now q has dimension: number_of_points
2287    assert len(q.shape) == 1
2288    assert q.shape[0] == number_of_points
2289
2290
2291    if verbose:
2292        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2293
2294
2295    # Create grid and update xll/yll corner and x,y
2296    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2297    assert len(vertex_points.shape) == 2
2298
2299    # Interpolate
2300    from fit_interpolate.interpolate import Interpolate
2301    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2302
2303    # Interpolate using quantity values
2304    if verbose: print 'Interpolating'
2305    interpolated_values = interp.interpolate(q, data_points).flat
2306
2307
2308    if verbose:
2309        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2310                                                      max(interpolated_values))
2311
2312    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2313    P = interp.mesh.get_boundary_polygon()
2314    outside_indices = outside_polygon(data_points, P, closed=True)
2315
2316    for i in outside_indices:
2317        interpolated_values[i] = NODATA_value
2318
2319
2320    # Store results   
2321    G = Geospatial_data(data_points=data_points,
2322                        attributes=interpolated_values)
2323
2324    G.export_points_file(ptsfile, absolute = True)
2325
2326    fid.close()
2327
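# Example usage of sww2pts (a minimal sketch; filenames and points are
# hypothetical, and the data points are assumed to be given in the same
# coordinate frame as the sww file's x and y, i.e. relative to its
# georeference):
#
#     from Numeric import array
#     points = array([[350.0, 250.0], [820.0, 640.0]])
#     sww2pts('runup', data_points=points,
#             quantity='elevation', verbose=True)
#
# This writes runup_elevation.pts with one interpolated value per point.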
2328
2329def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2330                                  use_cache = False,
2331                                  verbose = False):
2332    """Read Digitial Elevation model from the following ASCII format (.asc)
2333
2334    Example:
2335
2336    ncols         3121
2337    nrows         1800
2338    xllcorner     722000
2339    yllcorner     5893000
2340    cellsize      25
2341    NODATA_value  -9999
2342    138.3698 137.4194 136.5062 135.5558 ..........
2343
2344    Convert basename_in + '.asc' to NetCDF format (.dem)
2345    mimicking the ASCII format closely.
2346
2347
2348    An accompanying file with same basename_in but extension .prj must exist
2349    and is used to fix the UTM zone, datum, false northings and eastings.
2350
2351    The prj format is assumed to be as
2352
2353    Projection    UTM
2354    Zone          56
2355    Datum         WGS84
2356    Zunits        NO
2357    Units         METERS
2358    Spheroid      WGS84
2359    Xshift        0.0000000000
2360    Yshift        10000000.0000000000
2361    Parameters
2362    """
2363
2364
2365
2366    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2367
2368    if use_cache is True:
2369        from caching import cache
2370        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2371                       dependencies = [basename_in + '.asc'],
2372                       verbose = verbose)
2373
2374    else:
2375        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2376
2377    return result
2378
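# Example usage (a minimal sketch; 'perth' is a hypothetical basename for
# which both perth.asc and perth.prj exist):
#
#     convert_dem_from_ascii2netcdf('perth', use_cache=True, verbose=True)
#
# This writes perth.dem; with use_cache=True the conversion is only redone
# when perth.asc changes.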
2379
2380
2381
2382
2383
2384def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2385                                  verbose = False):
2386    """Read Digitial Elevation model from the following ASCII format (.asc)
2387
2388    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2389    """
2390
2391    import os
2392    from Scientific.IO.NetCDF import NetCDFFile
2393    from Numeric import Float, array
2394
2395    #root, ext = os.path.splitext(basename_in)
2396    root = basename_in
2397
2398    ###########################################
2399    # Read Meta data
2400    if verbose: print 'Reading METADATA from %s' %(root + '.prj')
2401    metadatafile = open(root + '.prj')
2402    metalines = metadatafile.readlines()
2403    metadatafile.close()
2404
2405    L = metalines[0].strip().split()
2406    assert L[0].strip().lower() == 'projection'
2407    projection = L[1].strip()                   #TEXT
2408
2409    L = metalines[1].strip().split()
2410    assert L[0].strip().lower() == 'zone'
2411    zone = int(L[1].strip())
2412
2413    L = metalines[2].strip().split()
2414    assert L[0].strip().lower() == 'datum'
2415    datum = L[1].strip()                        #TEXT
2416
2417    L = metalines[3].strip().split()
2418    assert L[0].strip().lower() == 'zunits'     #IGNORE
2419    zunits = L[1].strip()                       #TEXT
2420
2421    L = metalines[4].strip().split()
2422    assert L[0].strip().lower() == 'units'
2423    units = L[1].strip()                        #TEXT
2424
2425    L = metalines[5].strip().split()
2426    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2427    spheroid = L[1].strip()                     #TEXT
2428
2429    L = metalines[6].strip().split()
2430    assert L[0].strip().lower() == 'xshift'
2431    false_easting = float(L[1].strip())
2432
2433    L = metalines[7].strip().split()
2434    assert L[0].strip().lower() == 'yshift'
2435    false_northing = float(L[1].strip())
2436
2437    #print false_easting, false_northing, zone, datum
2438
2439
2440    ###########################################
2441    #Read DEM data
2442
2443    datafile = open(basename_in + '.asc')
2444
2445    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2446    lines = datafile.readlines()
2447    datafile.close()
2448
2449    if verbose: print 'Got', len(lines), ' lines'
2450
2451    ncols = int(lines[0].split()[1].strip())
2452    nrows = int(lines[1].split()[1].strip())
2453    xllcorner = float(lines[2].split()[1].strip())
2454    yllcorner = float(lines[3].split()[1].strip())
2455    cellsize = float(lines[4].split()[1].strip())
2456    NODATA_value = int(lines[5].split()[1].strip())
2457
2458    assert len(lines) == nrows + 6
2459
2460
2461    ##########################################
2462
2463
2464    if basename_out == None:
2465        netcdfname = root + '.dem'
2466    else:
2467        netcdfname = basename_out + '.dem'
2468
2469    if verbose: print 'Store to NetCDF file %s' %netcdfname
2470    # NetCDF file definition
2471    fid = NetCDFFile(netcdfname, 'w')
2472
2473    #Create new file
2474    fid.institution = 'Geoscience Australia'
2475    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2476                      'of spatial point data'
2477
2478    fid.ncols = ncols
2479    fid.nrows = nrows
2480    fid.xllcorner = xllcorner
2481    fid.yllcorner = yllcorner
2482    fid.cellsize = cellsize
2483    fid.NODATA_value = NODATA_value
2484
2485    fid.zone = zone
2486    fid.false_easting = false_easting
2487    fid.false_northing = false_northing
2488    fid.projection = projection
2489    fid.datum = datum
2490    fid.units = units
2491
2492
2493    # dimension definitions
2494    fid.createDimension('number_of_rows', nrows)
2495    fid.createDimension('number_of_columns', ncols)
2496
2497    # variable definitions
2498    fid.createVariable('elevation', Float, ('number_of_rows',
2499                                            'number_of_columns'))
2500
2501    # Get handles to the variables
2502    elevation = fid.variables['elevation']
2503
2504    #Store data
2505    n = len(lines[6:])
2506    for i, line in enumerate(lines[6:]):
2507        fields = line.split()
2508        if verbose and i%((n+10)/10)==0:
2509            print 'Processing row %d of %d' %(i, nrows)
2510
2511        elevation[i, :] = array([float(x) for x in fields])
2512
2513    fid.close()
2514
2515
2516
2517
2518
2519def ferret2sww(basename_in, basename_out = None,
2520               verbose = False,
2521               minlat = None, maxlat = None,
2522               minlon = None, maxlon = None,
2523               mint = None, maxt = None, mean_stage = 0,
2524               origin = None, zscale = 1,
2525               fail_on_NaN = True,
2526               NaN_filler = 0,
2527               elevation = None,
2528               inverted_bathymetry = False
2529               ): #FIXME: Bathymetry should be obtained
2530                                  #from MOST somehow.
2531                                  #Alternatively from elsewhere
2532                                  #or, as a last resort,
2533                                  #specified here.
2534                                  #The value of -100 will work
2535                                  #for the Wollongong tsunami
2536                                  #scenario but is very hacky
2537    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2538    sww format native to pyvolution.
2539
2540    Specify only basename_in and read files of the form
2541    basename_in + '_ha.nc', '_ua.nc', '_va.nc' and '_e.nc' containing
2542    relative height, x-velocity, y-velocity and elevation, respectively.
2543
2544    Also convert latitude and longitude to UTM. All coordinates are
2545    assumed to be given in the GDA94 datum.
2546
2547    min's and max's: If omitted, the full extent is used.
2548    To include a boundary value, min may equal it, while max must exceed it.
2549    Lat and lon are assumed to be in decimal degrees
2550
2551    origin is a 3-tuple with geo referenced
2552    UTM coordinates (zone, easting, northing)
2553
2554    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2555    which means that longitude is the fastest
2556    varying dimension (row major order, so to speak)
2557
2558    ferret2sww uses grid points as vertices in a triangular grid
2559    counting vertices from lower left corner upwards, then right
2560    """
2561
2562    import os
2563    from Scientific.IO.NetCDF import NetCDFFile
2564    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2565    from Numeric import allclose, around
2566
2567    precision = Float
2568
2569    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2570
2571    if minlat != None:
2572        assert -90 < minlat < 90 , msg
2573    if maxlat != None:
2574        assert -90 < maxlat < 90 , msg
2575    if minlon != None:
2576        assert -180 < minlon < 180 , msg
2577    if maxlon != None:
2578        assert -180 < maxlon < 180 , msg
2579
2580
2581    #Get NetCDF data
2582    if verbose: print 'Reading files %s_*.nc' %basename_in
2583    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2584    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2585    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2586    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2587
2588    if basename_out is None:
2589        swwname = basename_in + '.sww'
2590    else:
2591        swwname = basename_out + '.sww'
2592
2593    # Get dimensions of file_h
2594    for dimension in file_h.dimensions.keys():
2595        if dimension[:3] == 'LON':
2596            dim_h_longitude = dimension
2597        if dimension[:3] == 'LAT':
2598            dim_h_latitude = dimension
2599        if dimension[:4] == 'TIME':
2600            dim_h_time = dimension
2601
2602#    print 'long:', dim_h_longitude
2603#    print 'lats:', dim_h_latitude
2604#    print 'times:', dim_h_time
2605
2606    times = file_h.variables[dim_h_time]
2607    latitudes = file_h.variables[dim_h_latitude]
2608    longitudes = file_h.variables[dim_h_longitude]
2609   
2610    # get dimensions for file_e
2611    for dimension in file_e.dimensions.keys():
2612        if dimension[:3] == 'LON':
2613            dim_e_longitude = dimension
2614        if dimension[:3] == 'LAT':
2615            dim_e_latitude = dimension
2616        if dimension[:4] == 'TIME':
2617            dim_e_time = dimension
2618
2619    # get dimensions for file_u
2620    for dimension in file_u.dimensions.keys():
2621        if dimension[:3] == 'LON':
2622            dim_u_longitude = dimension
2623        if dimension[:3] == 'LAT':
2624            dim_u_latitude = dimension
2625        if dimension[:4] == 'TIME':
2626            dim_u_time = dimension
2627           
2628    # get dimensions for file_v
2629    for dimension in file_v.dimensions.keys():
2630        if dimension[:3] == 'LON':
2631            dim_v_longitude = dimension
2632        if dimension[:3] == 'LAT':
2633            dim_v_latitude = dimension
2634        if dimension[:4] == 'TIME':
2635            dim_v_time = dimension
2636
2637
2638    #Precision used by MOST for lat/lon is 4 or 5 decimals
2639    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2640    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2641
2642    #Check that files are compatible
2643    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2644    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2645    assert allclose(latitudes, e_lat)
2646
2647    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2648    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2649    assert allclose(longitudes, e_lon)
2650
2651    if mint == None:
2652        jmin = 0
2653    else:
2654        jmin = searchsorted(times, mint)
2655
2656    if maxt == None:
2657        jmax=len(times)
2658    else:
2659        jmax = searchsorted(times, maxt)
2660
2661    if minlat == None:
2662        kmin=0
2663    else:
2664        kmin = searchsorted(latitudes, minlat)
2665
2666    if maxlat == None:
2667        kmax = len(latitudes)
2668    else:
2669        kmax = searchsorted(latitudes, maxlat)
2670
2671    if minlon == None:
2672        lmin=0
2673    else:
2674        lmin = searchsorted(longitudes, minlon)
2675
2676    if maxlon == None:
2677        lmax = len(longitudes)
2678    else:
2679        lmax = searchsorted(longitudes, maxlon)
2680
2681#    print' j', jmin, jmax
2682    times = times[jmin:jmax]
2683    latitudes = latitudes[kmin:kmax]
2684    longitudes = longitudes[lmin:lmax]
2685
2686
2687    if verbose: print 'cropping'
2688    zname = 'ELEVATION'
2689
2690    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2691    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2692    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2693    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2694
2695    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2696    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2697    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2698    #        from Numeric import asarray
2699    #        elevations=elevations.tolist()
2700    #        elevations.reverse()
2701    #        elevations=asarray(elevations)
2702    #    else:
2703    #        from Numeric import asarray
2704    #        elevations=elevations.tolist()
2705    #        elevations.reverse()
2706    #        elevations=asarray(elevations)
2707    #        'print hmmm'
2708
2709
2710
2711    #Get missing values
2712    nan_ha = file_h.variables['HA'].missing_value[0]
2713    nan_ua = file_u.variables['UA'].missing_value[0]
2714    nan_va = file_v.variables['VA'].missing_value[0]
2715    if hasattr(file_e.variables[zname],'missing_value'):
2716        nan_e  = file_e.variables[zname].missing_value[0]
2717    else:
2718        nan_e = None
2719
2720    #Cleanup
2721    from Numeric import sometrue
2722
2723    missing = (amplitudes == nan_ha)
2724    if sometrue (missing):
2725        if fail_on_NaN:
2726            msg = 'NetCDFFile %s contains missing values'\
2727                  %(basename_in+'_ha.nc')
2728            raise DataMissingValuesError, msg
2729        else:
2730            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2731
2732    missing = (uspeed == nan_ua)
2733    if sometrue (missing):
2734        if fail_on_NaN:
2735            msg = 'NetCDFFile %s contains missing values'\
2736                  %(basename_in+'_ua.nc')
2737            raise DataMissingValuesError, msg
2738        else:
2739            uspeed = uspeed*(missing==0) + missing*NaN_filler
2740
2741    missing = (vspeed == nan_va)
2742    if sometrue (missing):
2743        if fail_on_NaN:
2744            msg = 'NetCDFFile %s contains missing values'\
2745                  %(basename_in+'_va.nc')
2746            raise DataMissingValuesError, msg
2747        else:
2748            vspeed = vspeed*(missing==0) + missing*NaN_filler
2749
2750
2751    missing = (elevations == nan_e)
2752    if sometrue (missing):
2753        if fail_on_NaN:
2754            msg = 'NetCDFFile %s contains missing values'\
2755                  %(basename_in+'_e.nc')
2756            raise DataMissingValuesError, msg
2757        else:
2758            elevations = elevations*(missing==0) + missing*NaN_filler
2759
2760    #######
2761
2762
2763
2764    number_of_times = times.shape[0]
2765    number_of_latitudes = latitudes.shape[0]
2766    number_of_longitudes = longitudes.shape[0]
2767
2768    assert amplitudes.shape[0] == number_of_times
2769    assert amplitudes.shape[1] == number_of_latitudes
2770    assert amplitudes.shape[2] == number_of_longitudes
2771
2772    if verbose:
2773        print '------------------------------------------------'
2774        print 'Statistics:'
2775        print '  Extent (lat/lon):'
2776        print '    lat in [%f, %f], len(lat) == %d'\
2777              %(min(latitudes.flat), max(latitudes.flat),
2778                len(latitudes.flat))
2779        print '    lon in [%f, %f], len(lon) == %d'\
2780              %(min(longitudes.flat), max(longitudes.flat),
2781                len(longitudes.flat))
2782        print '    t in [%f, %f], len(t) == %d'\
2783              %(min(times.flat), max(times.flat), len(times.flat))
2784
2785        q = amplitudes.flat
2786        name = 'Amplitudes (ha) [cm]'
2787        print '    %s in [%f, %f]' %(name, min(q), max(q))
2788
2789        q = uspeed.flat
2790        name = 'Speeds (ua) [cm/s]'
2791        print '    %s in [%f, %f]' %(name, min(q), max(q))
2792
2793        q = vspeed.flat
2794        name = 'Speeds (va) [cm/s]'
2795        print '    %s in [%f, %f]' %(name, min(q), max(q))
2796
2797        q = elevations.flat
2798        name = 'Elevations (e) [m]'
2799        print '    %s in [%f, %f]' %(name, min(q), max(q))
2800
2801
2802    #print number_of_latitudes, number_of_longitudes
2803    number_of_points = number_of_latitudes*number_of_longitudes
2804    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2805
2806
2807    file_h.close()
2808    file_u.close()
2809    file_v.close()
2810    file_e.close()
2811
2812
2813    # NetCDF file definition
2814    outfile = NetCDFFile(swwname, 'w')
2815
2816    #Create new file
2817    outfile.institution = 'Geoscience Australia'
2818    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2819                          %(basename_in + '_ha.nc',
2820                            basename_in + '_ua.nc',
2821                            basename_in + '_va.nc',
2822                            basename_in + '_e.nc')
2823
2824
2825    #For sww compatibility
2826    outfile.smoothing = 'Yes'
2827    outfile.order = 1
2828
2829    #Start time in seconds since the epoch (midnight 1/1/1970)
2830    outfile.starttime = starttime = times[0]
2831    times = times - starttime  #Store relative times
2832
2833    # dimension definitions
2834    outfile.createDimension('number_of_volumes', number_of_volumes)
2835
2836    outfile.createDimension('number_of_vertices', 3)
2837    outfile.createDimension('number_of_points', number_of_points)
2838
2839
2841    outfile.createDimension('number_of_timesteps', len(times))
2842
2843    # variable definitions
2844    outfile.createVariable('x', precision, ('number_of_points',))
2845    outfile.createVariable('y', precision, ('number_of_points',))
2846    outfile.createVariable('elevation', precision, ('number_of_points',))
2847
2848    #FIXME: Backwards compatibility
2849    outfile.createVariable('z', precision, ('number_of_points',))
2850    #################################
2851
2852    outfile.createVariable('volumes', Int, ('number_of_volumes',
2853                                            'number_of_vertices'))
2854
2855    outfile.createVariable('time', precision,
2856                           ('number_of_timesteps',))
2857
2858    outfile.createVariable('stage', precision,
2859                           ('number_of_timesteps',
2860                            'number_of_points'))
2861
2862    outfile.createVariable('xmomentum', precision,
2863                           ('number_of_timesteps',
2864                            'number_of_points'))
2865
2866    outfile.createVariable('ymomentum', precision,
2867                           ('number_of_timesteps',
2868                            'number_of_points'))
2869
2870
2871    #Store
2872    from coordinate_transforms.redfearn import redfearn
2873    x = zeros(number_of_points, Float)  #Easting
2874    y = zeros(number_of_points, Float)  #Northing
2875
2876
2877    if verbose: print 'Making triangular grid'
2878    #Check zone boundaries
2879    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2880
2881    vertices = {}
2882    i = 0
2883    for k, lat in enumerate(latitudes):       #Y direction
2884        for l, lon in enumerate(longitudes):  #X direction
2885
2886            vertices[l,k] = i
2887
2888            zone, easting, northing = redfearn(lat,lon)
2889
2890            msg = 'Zone boundary crossed at longitude = %s' %lon
2891            #assert zone == refzone, msg
2892            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2893            x[i] = easting
2894            y[i] = northing
2895            i += 1
2896
2897
2898    #Construct 2 triangles per 'rectangular' element
2899    volumes = []
2900    for l in range(number_of_longitudes-1):    #X direction
2901        for k in range(number_of_latitudes-1): #Y direction
2902            v1 = vertices[l,k+1]
2903            v2 = vertices[l,k]
2904            v3 = vertices[l+1,k+1]
2905            v4 = vertices[l+1,k]
2906
2907            volumes.append([v1,v2,v3]) #Upper element
2908            volumes.append([v4,v3,v2]) #Lower element
2909
2910    volumes = array(volumes)
2911
2912    if origin == None:
2913        zone = refzone
2914        xllcorner = min(x)
2915        yllcorner = min(y)
2916    else:
2917        zone = origin[0]
2918        xllcorner = origin[1]
2919        yllcorner = origin[2]
2920
2921
2922    outfile.xllcorner = xllcorner
2923    outfile.yllcorner = yllcorner
2924    outfile.zone = zone
2925
2926
2927    if elevation is not None:
2928        z = elevation
2929    else:
2930        if inverted_bathymetry:
2931            z = -1*elevations
2932        else:
2933            z = elevations
2934    #FIXME: z should be obtained from MOST and passed in here
2935
2936    from Numeric import resize
2937    z = resize(z,outfile.variables['z'][:].shape)
2938    outfile.variables['x'][:] = x - xllcorner
2939    outfile.variables['y'][:] = y - yllcorner
2940    outfile.variables['z'][:] = z             #FIXME HACK
2941    outfile.variables['elevation'][:] = z
2942    outfile.variables['time'][:] = times   #Store time relative
2943    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2944
2945
2946
2947    #Time stepping
2948    stage = outfile.variables['stage']
2949    xmomentum = outfile.variables['xmomentum']
2950    ymomentum = outfile.variables['ymomentum']
2951
2952    if verbose: print 'Converting quantities'
2953    n = len(times)
2954    for j in range(n):
2955        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2956        i = 0
2957        for k in range(number_of_latitudes):      #Y direction
2958            for l in range(number_of_longitudes): #X direction
2959                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2960                stage[j,i] = w
2961                h = w - z[i]
2962                xmomentum[j,i] = uspeed[j,k,l]/100*h
2963                ymomentum[j,i] = vspeed[j,k,l]/100*h
2964                i += 1
2965
2966    #outfile.close()
2967
2968    #FIXME: Refactor using code from file_function.statistics
2969    #Something like print swwstats(swwname)
2970    if verbose:
2971        x = outfile.variables['x'][:]
2972        y = outfile.variables['y'][:]
2973        times = outfile.variables['time'][:]
2974        print '------------------------------------------------'
2975        print 'Statistics of output file:'
2976        print '  Name: %s' %swwname
2977        print '  Reference:'
2978        print '    Lower left corner: [%f, %f]'\
2979              %(xllcorner, yllcorner)
2980        print '    Start time: %f' %starttime
2981        print '  Extent:'
2982        print '    x [m] in [%f, %f], len(x) == %d'\
2983              %(min(x.flat), max(x.flat), len(x.flat))
2984        print '    y [m] in [%f, %f], len(y) == %d'\
2985              %(min(y.flat), max(y.flat), len(y.flat))
2986        print '    t [s] in [%f, %f], len(t) == %d'\
2987              %(min(times), max(times), len(times))
2988        print '  Quantities [SI units]:'
2989        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2990            q = outfile.variables[name][:].flat
2991            print '    %s in [%f, %f]' %(name, min(q), max(q))
2992
2993
2994
2995    outfile.close()
2996
2997
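# Example usage of ferret2sww (a minimal sketch; the basename and cropping
# window are hypothetical):
#
#     ferret2sww('most_output',
#                minlat=-21.0, maxlat=-20.0,
#                minlon=114.0, maxlon=115.0,
#                mean_stage=0.0, zscale=1,
#                inverted_bathymetry=True,
#                verbose=True)
#
# This reads most_output_ha.nc, most_output_ua.nc, most_output_va.nc and
# most_output_e.nc and writes most_output.sww.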
2998
2999
3000
3001def timefile2netcdf(filename, quantity_names = None):
3002    """Template for converting typical text files with time series to
3003    NetCDF tms file.
3004
3005
3006    The file format is assumed to be two fields separated by a comma:
3007
3008        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3009
3010    E.g
3011
3012      31/08/04 00:00:00, 1.328223 0 0
3013      31/08/04 00:15:00, 1.292912 0 0
3014
3015    will provide a time dependent function f(t) with three attributes
3016
3017    filename is assumed to be the rootname, with input from filename.txt and output to filename.tms
3018    """
3019
3020    import time, calendar
3021    from Numeric import array
3022    from config import time_format
3023    from utilities.numerical_tools import ensure_numeric
3024
3025
3026
3027    fid = open(filename + '.txt')
3028    line = fid.readline()
3029    fid.close()
3030
3031    fields = line.split(',')
3032    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3033    assert len(fields) == 2, msg
3034
3035    try:
3036        starttime = calendar.timegm(time.strptime(fields[0], time_format))
3037    except ValueError:
3038        msg = 'First field in file %s must be' %filename
3039        msg += ' date-time with format %s.\n' %time_format
3040        msg += 'I got %s instead.' %fields[0]
3041        raise DataTimeError, msg
3042
3043
3044    #Split values
3045    values = []
3046    for value in fields[1].split():
3047        values.append(float(value))
3048
3049    q = ensure_numeric(values)
3050
3051    msg = 'ERROR: File must contain at least one independent value'
3052    assert len(q.shape) == 1, msg
3053
3054
3055
3056    #Read times proper
3057    from Numeric import zeros, Float, alltrue
3058    from config import time_format
3059    import time, calendar
3060
3061    fid = open(filename + '.txt')
3062    lines = fid.readlines()
3063    fid.close()
3064
3065    N = len(lines)
3066    d = len(q)
3067
3068    T = zeros(N, Float)       #Time
3069    Q = zeros((N, d), Float)  #Values
3070
3071    for i, line in enumerate(lines):
3072        fields = line.split(',')
3073        realtime = calendar.timegm(time.strptime(fields[0], time_format))
3074
3075        T[i] = realtime - starttime
3076
3077        for j, value in enumerate(fields[1].split()):
3078            Q[i, j] = float(value)
3079
3080    msg = 'File %s must list time as a monotonically ' %filename
3081    msg += 'increasing sequence'
3082    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3083
3084
3085    #Create NetCDF file
3086    from Scientific.IO.NetCDF import NetCDFFile
3087
3088    fid = NetCDFFile(filename + '.tms', 'w')
3089
3090
3091    fid.institution = 'Geoscience Australia'
3092    fid.description = 'Time series'
3093
3094
3095    #Reference point
3096    #Start time in seconds since the epoch (midnight 1/1/1970)
3097    #FIXME: Use Georef
3098    fid.starttime = starttime
3099
3100
3101    # dimension definitions
3104
3105
3106    fid.createDimension('number_of_timesteps', len(T))
3107
3108    fid.createVariable('time', Float, ('number_of_timesteps',))
3109
3110    fid.variables['time'][:] = T
3111
3112    for i in range(Q.shape[1]):
3113        try:
3114            name = quantity_names[i]
3115        except (TypeError, IndexError):
3116            name = 'Attribute%d' %i
3117
3118        fid.createVariable(name, Float, ('number_of_timesteps',))
3119        fid.variables[name][:] = Q[:,i]
3120
3121    fid.close()
3122
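# Example usage of timefile2netcdf (a minimal sketch; 'gauge' is a
# hypothetical rootname for a file gauge.txt with lines such as
# '31/08/04 00:00:00, 1.328223 0 0'):
#
#     timefile2netcdf('gauge',
#                     quantity_names=['stage', 'xmomentum', 'ymomentum'])
#
# This writes gauge.tms with one NetCDF variable per column; columns without
# a supplied name fall back to Attribute0, Attribute1, ...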
3123
3124def extent_sww(file_name):
3125    """
3126    Read in an sww file.
3127
3128    Input;
3129    file_name - the sww file
3130
3131    Output;
3132    z - Vector of bed elevation
3133    volumes - Array.  Each row has 3 values, representing
3134    the vertices that define the volume
3135    time - Vector of the times where there is stage information
3136    stage - array with respect to time and vertices (x,y)
3137    """
3138
3139
3140    from Scientific.IO.NetCDF import NetCDFFile
3141
3142    #Check contents
3143    #Get NetCDF
3144    fid = NetCDFFile(file_name, 'r')
3145
3146    # Get the variables
3147    x = fid.variables['x'][:]
3148    y = fid.variables['y'][:]
3149    stage = fid.variables['stage'][:]
3150    #print "stage",stage
3151    #print "stage.shap",stage.shape
3152    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3153    #print "min(stage)",min(stage)
3154
3155    fid.close()
3156
3157    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3158
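# Example usage (a minimal sketch; the filename is hypothetical):
#
#     xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('runup.sww')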
3159
3160def sww2domain(filename, boundary=None, t=None,
3161               fail_if_NaN=True, NaN_filler=0,
3162               verbose=False, very_verbose=False):
3163    """
3164    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3165
3166    Boundary is not recommended if domain.smooth is not selected, as it
3167    uses unique coordinates, but not unique boundaries. This means that
3168    the boundary file will not be compatible with the coordinates, and will
3169    give a different final boundary, or crash.
3170    """
3171    NaN=9.969209968386869e+036
3172    #initialise NaN.
3173
3174    from Scientific.IO.NetCDF import NetCDFFile
3175    from shallow_water import Domain
3176    from Numeric import asarray, transpose, resize
3177
3178    if verbose: print 'Reading from ', filename
3179    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3180    time = fid.variables['time']       #Timesteps
3181    if t is None:
3182        t = time[-1]
3183    time_interp = get_time_interp(time,t)
3184
3185    # Get the variables as Numeric arrays
3186    x = fid.variables['x'][:]             #x-coordinates of vertices
3187    y = fid.variables['y'][:]             #y-coordinates of vertices
3188    elevation = fid.variables['elevation']     #Elevation
3189    stage = fid.variables['stage']     #Water level
3190    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3191    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3192
3193    starttime = fid.starttime[0]
3194    volumes = fid.variables['volumes'][:] #Connectivity
3195    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3196
3197    conserved_quantities = []
3198    interpolated_quantities = {}
3199    other_quantities = []
3200
3201    # get geo_reference
3202    #sww files don't have to have a geo_ref
3203    try:
3204        geo_reference = Geo_reference(NetCDFObject=fid)
3205    except: #AttributeError, e:
3206        geo_reference = None
3207
3208    if verbose: print '    getting quantities'
3209    for quantity in fid.variables.keys():
3210        dimensions = fid.variables[quantity].dimensions
3211        if 'number_of_timesteps' in dimensions:
3212            conserved_quantities.append(quantity)
3213            interpolated_quantities[quantity]=\
3214                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3215        else: other_quantities.append(quantity)
3216
3217    other_quantities.remove('x')
3218    other_quantities.remove('y')
3219    other_quantities.remove('z')
3220    other_quantities.remove('volumes')
3221
3222    conserved_quantities.remove('time')
3223
3224    if verbose: print '    building domain'
3225    #    From domain.Domain:
3226    #    domain = Domain(coordinates, volumes,\
3227    #                    conserved_quantities = conserved_quantities,\
3228    #                    other_quantities = other_quantities,zone=zone,\
3229    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3230
3231    #   From shallow_water.Domain:
3232    coordinates=coordinates.tolist()
3233    volumes=volumes.tolist()
3234    #FIXME:should this be in mesh?(peter row)
3235    if fid.smoothing == 'Yes': unique = False
3236    else: unique = True
3237    if unique:
3238        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3239
3240
3241    try:
3242        domain = Domain(coordinates, volumes, boundary)
3243    except AssertionError, e:
3244        fid.close()
3245        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3246        raise DataDomainError, msg
3247
3248    if not boundary is None:
3249        domain.boundary = boundary
3250
3251    domain.geo_reference = geo_reference
3252
3253    domain.starttime=float(starttime)+float(t)
3254    domain.time=0.0
3255
3256    for quantity in other_quantities:
3257        try:
3258            NaN = fid.variables[quantity].missing_value
3259        except:
3260            pass #quantity has no missing_value number
3261        X = fid.variables[quantity][:]
3262        if very_verbose:
3263            print '       ',quantity
3264            print '        NaN =',NaN
3265            print '        max(X)'
3266            print '       ',max(X)
3267            print '        max(X)==NaN'
3268            print '       ',max(X)==NaN
3269            print ''
3270        if (max(X)==NaN) or (min(X)==NaN):
3271            if fail_if_NaN:
3272                msg = 'quantity "%s" contains no_data entry'%quantity
3273                raise DataMissingValuesError, msg
3274            else:
3275                data = (X<>NaN)
3276                X = (X*data)+(data==0)*NaN_filler
3277        if unique:
3278            X = resize(X,(len(X)/3,3))
3279        domain.set_quantity(quantity,X)
3280    #
3281    for quantity in conserved_quantities:
3282        try:
3283            NaN = fid.variables[quantity].missing_value
3284        except:
3285            pass #quantity has no missing_value number
3286        X = interpolated_quantities[quantity]
3287        if very_verbose:
3288            print '       ',quantity
3289            print '        NaN =',NaN
3290            print '        max(X)'
3291            print '       ',max(X)
3292            print '        max(X)==NaN'
3293            print '       ',max(X)==NaN
3294            print ''
3295        if (max(X)==NaN) or (min(X)==NaN):
3296            if fail_if_NaN:
3297                msg = 'quantity "%s" contains no_data entry'%quantity
3298                raise DataMissingValuesError, msg
3299            else:
3300                data = (X<>NaN)
3301                X = (X*data)+(data==0)*NaN_filler
3302        if unique:
3303            X = resize(X,(X.shape[0]/3,3))
3304        domain.set_quantity(quantity,X)
3305
3306    fid.close()
3307    return domain
3308
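# Example usage of sww2domain (a minimal sketch; filename and time are
# hypothetical):
#
#     domain = sww2domain('runup.sww', t=300.0,
#                         fail_if_NaN=False, NaN_filler=0,
#                         verbose=True)
#
# The returned domain carries the saved quantities at (interpolated) time t
# and can be evolved further; note domain.starttime is offset by t.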
3309def interpolated_quantity(saved_quantity,time_interp):
3310
3311    #given an index and ratio, interpolate quantity with respect to time.
3312    index,ratio = time_interp
3313    Q = saved_quantity
3314    if ratio > 0:
3315        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3316    else:
3317        q = Q[index]
3318    #Return vector of interpolated values
3319    return q
3320
3321def get_time_interp(time,t=None):
3322    #Finds the ratio and index for time interpolation.
3323    #It is borrowed from previous pyvolution code.
3324    if t is None:
3325        t=time[-1]
3326        index = -1
3327        ratio = 0.
3328    else:
3329        T = time
3330        tau = t
3331        index=0
3332        msg = 'Time interval derived from file %s [%s:%s]'\
3333            %('FIXMEfilename', T[0], T[-1])
3334        msg += ' does not match model time: %s' %tau
3335        if tau < time[0]: raise DataTimeError, msg
3336        if tau > time[-1]: raise DataTimeError, msg
3337        while tau > time[index]: index += 1
3338        while tau < time[index]: index -= 1
3339        if tau == time[index]:
3340            #Protect against case where tau == time[-1] (last time)
3341            # - also works in general when tau == time[i]
3342            ratio = 0
3343        else:
3344            #t is now between index and index+1
3345            ratio = (tau - time[index])/(time[index+1] - time[index])
3346    return (index,ratio)
3347
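# A minimal usage sketch for get_time_interp (illustrative values only):
# t=15. falls halfway between the stored times 10. and 20., so the helper
# returns index 1 and ratio 0.5.
def _example_get_time_interp():
    index, ratio = get_time_interp([0., 10., 20.], t=15.)
    assert index == 1
    assert ratio == 0.5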
3348
def weed(coordinates, volumes, boundary=None):
    # Note: comparing type(x) to the string 'array' is always false;
    # compare the type's name instead to detect Numeric arrays.
    if type(coordinates).__name__ == 'array':
        coordinates = coordinates.tolist()
    if type(volumes).__name__ == 'array':
        volumes = volumes.tolist()
3354
3355    unique = False
3356    point_dict = {}
3357    same_point = {}
3358    for i in range(len(coordinates)):
3359        point = tuple(coordinates[i])
3360        if point_dict.has_key(point):
3361            unique = True
3362            same_point[i]=point
3363            #to change all point i references to point j
3364        else:
3365            point_dict[point]=i
3366            same_point[i]=point
3367
3368    coordinates = []
3369    i = 0
    for point in point_dict.keys():
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1
3375
3376
3377    for volume in volumes:
3378        for i in range(len(volume)):
3379            index = volume[i]
3380            if index>-1:
3381                volume[i]=point_dict[same_point[index]]
3382
3383    new_boundary = {}
    if boundary is not None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')? (peter row)

            if new_boundary.has_key((point0,point1)):
                new_boundary[(point0,point1)] = new_boundary[(point0,point1)]#\
                                              #+','+label

            elif new_boundary.has_key((point1,point0)):
                new_boundary[(point1,point0)] = new_boundary[(point1,point0)]#\
                                              #+','+label
            else: new_boundary[(point0,point1)] = label

        boundary = new_boundary
3402
3403    return coordinates,volumes,boundary
3404
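# A minimal usage sketch for weed (made-up mesh): two triangles stored
# with duplicated vertices are remapped onto the unique points.
def _example_weed():
    coordinates = [[0., 0.], [1., 0.], [0., 1.],
                   [1., 0.], [0., 1.], [1., 1.]]  # points 3,4 repeat 1,2
    volumes = [[0, 1, 2], [3, 4, 5]]
    coords, vols, _ = weed(coordinates, volumes)
    assert len(coords) == 4  # duplicates removed
    for triangle in vols:    # indices now refer to the weeded points
        for ix in triangle:
            assert 0 <= ix < len(coords)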
3405
3406def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3407                 verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)
3409
3410    Example:
3411
3412    ncols         3121
3413    nrows         1800
3414    xllcorner     722000
3415    yllcorner     5893000
3416    cellsize      25
3417    NODATA_value  -9999
3418    138.3698 137.4194 136.5062 135.5558 ..........
3419
3420    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3421    """
3422
3423    import os
3424    from Scientific.IO.NetCDF import NetCDFFile
3425    from Numeric import Float, zeros, sum, reshape, equal
3426
3427    root = basename_in
3428    inname = root + '.dem'
3429
3430    #Open existing netcdf file to read
3431    infile = NetCDFFile(inname, 'r')
3432    if verbose: print 'Reading DEM from %s' %inname
3433
3434    #Read metadata
3435    ncols = infile.ncols[0]
3436    nrows = infile.nrows[0]
3437    xllcorner = infile.xllcorner[0]
3438    yllcorner = infile.yllcorner[0]
3439    cellsize = infile.cellsize[0]
3440    NODATA_value = infile.NODATA_value[0]
3441    zone = infile.zone[0]
3442    false_easting = infile.false_easting[0]
3443    false_northing = infile.false_northing[0]
3444    projection = infile.projection
3445    datum = infile.datum
3446    units = infile.units
3447
3448    dem_elevation = infile.variables['elevation']
3449
3450    #Get output file name
    if basename_out is None:
3452        outname = root + '_' + repr(cellsize_new) + '.dem'
3453    else:
3454        outname = basename_out + '.dem'
3455
3456    if verbose: print 'Write decimated NetCDF file to %s' %outname
3457
3458    #Determine some dimensions for decimated grid
3459    (nrows_stencil, ncols_stencil) = stencil.shape
3460    x_offset = ncols_stencil / 2
3461    y_offset = nrows_stencil / 2
3462    cellsize_ratio = int(cellsize_new / cellsize)
3463    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3464    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
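    # E.g. (illustrative numbers): 3121 columns at 25m decimated to 100m
    # with a 4x4 stencil gives cellsize_ratio = 4 and
    # ncols_new = 1 + (3121 - 4)/4 = 780.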
3465
3466    #Open netcdf file for output
3467    outfile = NetCDFFile(outname, 'w')
3468
3469    #Create new file
3470    outfile.institution = 'Geoscience Australia'
3471    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3472                           'of spatial point data'
3473    #Georeferencing
3474    outfile.zone = zone
3475    outfile.projection = projection
3476    outfile.datum = datum
3477    outfile.units = units
3478
3479    outfile.cellsize = cellsize_new
3480    outfile.NODATA_value = NODATA_value
3481    outfile.false_easting = false_easting
3482    outfile.false_northing = false_northing
3483
3484    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3485    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3486    outfile.ncols = ncols_new
3487    outfile.nrows = nrows_new
3488
3489    # dimension definition
3490    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3491
3492    # variable definition
3493    outfile.createVariable('elevation', Float, ('number_of_points',))
3494
3495    # Get handle to the variable
3496    elevation = outfile.variables['elevation']
3497
3498    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3499
3500    #Store data
3501    global_index = 0
3502    for i in range(nrows_new):
3503        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3504        lower_index = global_index
3505        telev =  zeros(ncols_new, Float)
3506        local_index = 0
3507        trow = i * cellsize_ratio
3508
3509        for j in range(ncols_new):
3510            tcol = j * cellsize_ratio
3511            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3512
3513            #if dem contains 1 or more NODATA_values set value in
3514            #decimated dem to NODATA_value, else compute decimated
3515            #value using stencil
3516            if sum(sum(equal(tmp, NODATA_value))) > 0:
3517                telev[local_index] = NODATA_value
3518            else:
3519                telev[local_index] = sum(sum(tmp * stencil))
3520
3521            global_index += 1
3522            local_index += 1
3523
3524        upper_index = global_index
3525
3526        elevation[lower_index:upper_index] = telev
3527
3528    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3529
3530    infile.close()
3531    outfile.close()
3532
3533
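# A hypothetical use of decimate_dem (names and sizes illustrative only):
# average 4x4 blocks of 25m cells to produce a 100m DEM. The stencil is
# any weight matrix; for plain averaging its entries sum to 1.
#
#     from Numeric import ones, Float
#     stencil = ones((4, 4), Float)/16.
#     decimate_dem('my_dem', stencil, 100)   # writes my_dem_100.dem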
3534
3535
def tsh2sww(filename, verbose=False): #test_tsh2sww
    """
    Convert a tsh/msh file to sww format, e.g. to check that the mesh
    'looks' good.
    """
3540
3541    from shallow_water import Domain
3542    from pmesh2domain import pmesh_to_domain_instance
3543    import time, os
3544    from data_manager import get_dataobject
3545    from os import sep, path
3546    from utilities.numerical_tools import mean
3547
    if verbose: print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose: print "Number of triangles = ", len(domain)
3551
3552    domain.smooth = True
3553    domain.format = 'sww'   #Native netcdf visualisation format
3554    file_path, filename = path.split(filename)
3555    filename, ext = path.splitext(filename)
3556    domain.filename = filename
3557    domain.reduction = mean
    if verbose: print "file_path", file_path
    if file_path == "": file_path = "."
3560    domain.set_datadir(file_path)
3561
    if verbose:
3563        print "Output written to " + domain.get_datadir() + sep + \
3564              domain.filename + "." + domain.format
3565    sww = get_dataobject(domain)
3566    sww.store_connectivity()
3567    sww.store_timestep('stage')
3568
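# A hypothetical use of tsh2sww (file name illustrative): writes
# my_mesh.sww next to the input mesh so it can be eyeballed in a viewer.
#
#     tsh2sww('my_mesh.tsh', verbose=True)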
3569
3570def asc_csiro2sww(bath_dir,
3571                  elevation_dir,
3572                  ucur_dir,
3573                  vcur_dir,
3574                  sww_file,
3575                  minlat = None, maxlat = None,
3576                  minlon = None, maxlon = None,
3577                  zscale=1,
3578                  mean_stage = 0,
3579                  fail_on_NaN = True,
3580                  elevation_NaN_filler = 0,
3581                  bath_prefix='ba',
3582                  elevation_prefix='el',
3583                  verbose=False):
    """
    Produce an sww boundary file from esri ascii data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    assume:
    All files are in esri ascii format

    4 types of information
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions
    The metadata of all the files is the same
    Each type is in a separate directory
    One bath file with extension .000
    The time period is less than 24hrs and uniform.
    """
3605    from Scientific.IO.NetCDF import NetCDFFile
3606
3607    from coordinate_transforms.redfearn import redfearn
3608
3609    precision = Float # So if we want to change the precision its done here
3610
3611    # go in to the bath dir and load the only file,
3612    bath_files = os.listdir(bath_dir)
3613    #print "bath_files",bath_files
3614
3615    #fixme: make more general?
3616    bath_file = bath_files[0]
3617    bath_dir_file =  bath_dir + os.sep + bath_file
3618    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3619    #print "bath_metadata",bath_metadata
3620    #print "bath_grid",bath_grid
3621
3622    #Use the date.time of the bath file as a basis for
3623    #the start time for other files
3624    base_start = bath_file[-12:]
3625
3626    #go into the elevation dir and load the 000 file
3627    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3628                         + base_start
3629    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
3630    #print "elevation_dir_file",elevation_dir_file
3631    #print "elevation_grid", elevation_grid
3632
3633    elevation_files = os.listdir(elevation_dir)
3634    ucur_files = os.listdir(ucur_dir)
3635    vcur_files = os.listdir(vcur_dir)
3636
3637    # the first elevation file should be the
3638    # file with the same base name as the bath data
3639    #print "elevation_files[0]",elevation_files[0]
3640    #print "'el' + base_start",'el' + base_start
    assert elevation_files[0] == elevation_prefix + base_start
3642
3643    #assert bath_metadata == elevation_metadata
3644
3645
3646
3647    number_of_latitudes = bath_grid.shape[0]
3648    number_of_longitudes = bath_grid.shape[1]
3649    #number_of_times = len(os.listdir(elevation_dir))
3650    #number_of_points = number_of_latitudes*number_of_longitudes
3651    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3652
3653    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3654                  for x in range(number_of_longitudes)]
3655    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3656                 for y in range(number_of_latitudes)]
3657
    # reverse order of lat, so the first lat represents the first grid row
3659    latitudes.reverse()
3660
3661    #print "latitudes - before _get_min_max_indexes",latitudes
3662    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
3663                                                 minlat=minlat, maxlat=maxlat,
3664                                                 minlon=minlon, maxlon=maxlon)
3665
3666
3667    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3668    #print "bath_grid",bath_grid
3669    latitudes = latitudes[kmin:kmax]
3670    longitudes = longitudes[lmin:lmax]
3671    number_of_latitudes = len(latitudes)
3672    number_of_longitudes = len(longitudes)
3673    number_of_times = len(os.listdir(elevation_dir))
3674    number_of_points = number_of_latitudes*number_of_longitudes
3675    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3676    #print "number_of_points",number_of_points
3677
3678    #Work out the times
3679    if len(elevation_files) > 1:
3680        # Assume: The time period is less than 24hrs.
3681        time_period = (int(elevation_files[1][-3:]) - \
3682                      int(elevation_files[0][-3:]))*60*60
3683        times = [x*time_period for x in range(len(elevation_files))]
3684    else:
3685        times = [0.0]
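    # E.g. (illustrative): elevation files ending in .000 and .001 are
    # taken to be one hour apart, so time_period = 3600s and
    # times = [0.0, 3600.0, ...].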
3686    #print "times", times
3687    #print "number_of_latitudes", number_of_latitudes
3688    #print "number_of_longitudes", number_of_longitudes
3689    #print "number_of_times", number_of_times
3690    #print "latitudes", latitudes
3691    #print "longitudes", longitudes
3692
3693
3694    if verbose:
3695        print '------------------------------------------------'
3696        print 'Statistics:'
3697        print '  Extent (lat/lon):'
3698        print '    lat in [%f, %f], len(lat) == %d'\
3699              %(min(latitudes), max(latitudes),
3700                len(latitudes))
3701        print '    lon in [%f, %f], len(lon) == %d'\
3702              %(min(longitudes), max(longitudes),
3703                len(longitudes))
3704        print '    t in [%f, %f], len(t) == %d'\
3705              %(min(times), max(times), len(times))
3706
3707    ######### WRITE THE SWW FILE #############
3708    # NetCDF file definition
3709    outfile = NetCDFFile(sww_file, 'w')
3710
3711    #Create new file
3712    outfile.institution = 'Geoscience Australia'
3713    outfile.description = 'Converted from XXX'
3714
3715
3716    #For sww compatibility
3717    outfile.smoothing = 'Yes'
3718    outfile.order = 1
3719
3720    #Start time in seconds since the epoch (midnight 1/1/1970)
3721    outfile.starttime = starttime = times[0]
3722
3723
3724    # dimension definitions
3725    outfile.createDimension('number_of_volumes', number_of_volumes)
3726
3727    outfile.createDimension('number_of_vertices', 3)
3728    outfile.createDimension('number_of_points', number_of_points)
3729    outfile.createDimension('number_of_timesteps', number_of_times)
3730
3731    # variable definitions
3732    outfile.createVariable('x', precision, ('number_of_points',))
3733    outfile.createVariable('y', precision, ('number_of_points',))
3734    outfile.createVariable('elevation', precision, ('number_of_points',))
3735
3736    #FIXME: Backwards compatibility
3737    outfile.createVariable('z', precision, ('number_of_points',))
3738    #################################
3739
3740    outfile.createVariable('volumes', Int, ('number_of_volumes',
3741                                            'number_of_vertices'))
3742
3743    outfile.createVariable('time', precision,
3744                           ('number_of_timesteps',))
3745
3746    outfile.createVariable('stage', precision,
3747                           ('number_of_timesteps',
3748                            'number_of_points'))
3749
3750    outfile.createVariable('xmomentum', precision,
3751                           ('number_of_timesteps',
3752                            'number_of_points'))
3753
3754    outfile.createVariable('ymomentum', precision,
3755                           ('number_of_timesteps',
3756                            'number_of_points'))
3757
    #Store (redfearn is already imported above)
3760    x = zeros(number_of_points, Float)  #Easting
3761    y = zeros(number_of_points, Float)  #Northing
3762
3763    if verbose: print 'Making triangular grid'
3764    #Get zone of 1st point.
3765    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3766
3767    vertices = {}
3768    i = 0
3769    for k, lat in enumerate(latitudes):
3770        for l, lon in enumerate(longitudes):
3771
3772            vertices[l,k] = i
3773
3774            zone, easting, northing = redfearn(lat,lon)
3775
            msg = 'Zone boundary crossed at longitude = %s' %lon
            #assert zone == refzone, msg
3778            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3779            x[i] = easting
3780            y[i] = northing
3781            i += 1
3782
3783
3784    #Construct 2 triangles per 'rectangular' element
3785    volumes = []
3786    for l in range(number_of_longitudes-1):    #X direction
3787        for k in range(number_of_latitudes-1): #Y direction
3788            v1 = vertices[l,k+1]
3789            v2 = vertices[l,k]
3790            v3 = vertices[l+1,k+1]
3791            v4 = vertices[l+1,k]
3792
            #Note, this is different to the ferret2sww code
            #since the order of the lats is reversed.
3795            volumes.append([v1,v3,v2]) #Upper element
3796            volumes.append([v4,v2,v3]) #Lower element
3797
3798    volumes = array(volumes)
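    # E.g. (illustrative): a 3x3 vertex grid gives 2x2 'rectangular'
    # cells and hence 8 triangles. In general there are
    # (number_of_latitudes-1)*(number_of_longitudes-1)*2 volumes,
    # matching number_of_volumes computed above.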
3799
3800    geo_ref = Geo_reference(refzone,min(x),min(y))
3801    geo_ref.write_NetCDF(outfile)
3802
3803    # This will put the geo ref in the middle
3804    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3805
3806
    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (x/y):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x), len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y), len(y))
        print 'geo_ref: ', geo_ref
3818
3819    z = resize(bath_grid,outfile.variables['z'][:].shape)
3820    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3821    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3822    outfile.variables['z'][:] = z
3823    outfile.variables['elevation'][:] = z  #FIXME HACK
3824    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3825
3826    # do this to create an ok sww file.
3827    #outfile.variables['time'][:] = [0]   #Store time relative
3828    #outfile.variables['stage'] = z
3829    # put the next line up in the code after outfile.order = 1
3830    #number_of_times = 1
3831
3832    stage = outfile.variables['stage']
3833    xmomentum = outfile.variables['xmomentum']
3834    ymomentum = outfile.variables['ymomentum']
3835
3836    outfile.variables['time'][:] = times   #Store time relative
3837
3838    if verbose: print 'Converting quantities'
3839    n = number_of_times
3840    for j in range(number_of_times):
3841        # load in files
3842        elevation_meta, elevation_grid = \
3843           _read_asc(elevation_dir + os.sep + elevation_files[j])
3844
3845        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3846        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3847
3848        #print "elevation_grid",elevation_grid
3849        #cut matrix to desired size
3850        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3851        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3852        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3853        #print "elevation_grid",elevation_grid
3854        # handle missing values
3855        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue(missing):
3857            if fail_on_NaN:
3858                msg = 'File %s contains missing values'\
3859                      %(elevation_files[j])
3860                raise DataMissingValuesError, msg
3861            else:
3862                elevation_grid = elevation_grid*(missing==0) + \
3863                                 missing*elevation_NaN_filler
3864
3865
3866        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3867        i = 0
3868        for k in range(number_of_latitudes):      #Y direction
3869            for l in range(number_of_longitudes): #X direction
3870                w = zscale*elevation_grid[k,l] + mean_stage
3871                stage[j,i] = w
3872                h = w - z[i]
3873                xmomentum[j,i] = u_momentum_grid[k,l]*h
3874                ymomentum[j,i] = v_momentum_grid[k,l]*h
3875                i += 1
3876    outfile.close()
3877
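# A hypothetical invocation of asc_csiro2sww (directory and file names
# illustrative only):
#
#     asc_csiro2sww('bathymetry_dir', 'elevation_dir', 'ucur_dir',
#                   'vcur_dir', 'output.sww',
#                   minlat=-38., maxlat=-35.,
#                   minlon=148., maxlon=151., verbose=True)
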
3878def _get_min_max_indexes(latitudes,longitudes,
3879                        minlat=None, maxlat=None,
3880                        minlon=None, maxlon=None):
    """
    Return max, min indexes (for slicing) of the lat and long arrays to
    cover the area specified with min/max lat/long.

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assume latitudes & longitudes are sorted,
    long - from low to high (west to east, eg 148 - 151)
    lat - from high to low (north to south, eg -35 - -38)
    """
3894
3895    # reverse order of lat, so it's in ascending order
3896    latitudes.reverse()
3897    largest_lat_index = len(latitudes)-1
3898    #Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0
3905
3906
    if maxlat is None:
        lat_max_index = largest_lat_index #len(latitudes)
3909    else:
3910        lat_max_index = searchsorted(latitudes, maxlat)
3911        if lat_max_index > largest_lat_index:
3912            lat_max_index = largest_lat_index
3913
    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0
3920
    if maxlon is None:
3922        lon_max_index = len(longitudes)
3923    else:
3924        lon_max_index = searchsorted(longitudes, maxlon)
3925
3926    #Take into account that the latitude list was reversed
3927    latitudes.reverse() # Python passes by reference, need to swap back
3928    lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3929                                   largest_lat_index - lat_min_index
3930    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3931    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3932
3933    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3934
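# A minimal usage sketch for _get_min_max_indexes (made-up coordinates):
# the returned slices are just big enough to cover the requested area.
def _example_get_min_max_indexes():
    latitudes = [-35., -36., -37., -38.]   # north to south
    longitudes = [148., 149., 150., 151.]  # west to east
    kmin, kmax, lmin, lmax = _get_min_max_indexes(
        latitudes, longitudes,
        minlat=-36.5, maxlat=-35.5, minlon=148.5, maxlon=149.5)
    assert latitudes[kmin:kmax] == [-35., -36., -37.]
    assert longitudes[lmin:lmax] == [148., 149., 150.]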
3935
3936def _read_asc(filename, verbose=False):
3937    """Read esri file from the following ASCII format (.asc)
3938
3939    Example:
3940
3941    ncols         3121
3942    nrows         1800
3943    xllcorner     722000
3944    yllcorner     5893000
3945    cellsize      25
3946    NODATA_value  -9999
3947    138.3698 137.4194 136.5062 135.5558 ..........
3948
3949    """
3950
3951    datafile = open(filename)
3952
3953    if verbose: print 'Reading DEM from %s' %(filename)
3954    lines = datafile.readlines()
3955    datafile.close()
3956
3957    if verbose: print 'Got', len(lines), ' lines'
3958
3959    ncols = int(lines.pop(0).split()[1].strip())
3960    nrows = int(lines.pop(0).split()[1].strip())
3961    xllcorner = float(lines.pop(0).split()[1].strip())
3962    yllcorner = float(lines.pop(0).split()[1].strip())
3963    cellsize = float(lines.pop(0).split()[1].strip())
3964    NODATA_value = float(lines.pop(0).split()[1].strip())
3965
3966    assert len(lines) == nrows
3967
3968    #Store data
3969    grid = []
3970
    for line in lines:
        cells = line.split()
        assert len(cells) == ncols
        grid.append(array([float(x) for x in cells]))
3976    grid = array(grid)
3977
3978    return {'xllcorner':xllcorner,
3979            'yllcorner':yllcorner,
3980            'cellsize':cellsize,
3981            'NODATA_value':NODATA_value}, grid
3982
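# A minimal usage sketch for _read_asc: write a tiny 2x3 grid to a
# temporary file (name chosen by tempfile) and read it back.
def _example_read_asc():
    import tempfile, os
    fd, name = tempfile.mkstemp(suffix='.asc')
    os.close(fd)
    fid = open(name, 'w')
    fid.write('ncols 3\nnrows 2\nxllcorner 0.0\nyllcorner 0.0\n'
              'cellsize 25.0\nNODATA_value -9999\n'
              '1 2 3\n4 5 6\n')
    fid.close()
    metadata, grid = _read_asc(name)
    os.remove(name)
    assert metadata['cellsize'] == 25.0
    assert grid.shape == (2, 3)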
3983
3984# FIXME (Ole): Is this doing anything at all?
3985def sww2timeseries(swwfile,
3986                   gauge_filename,
3987                   gauge_data_outname,
3988                   quantity = None,
3989                   time_min = None,
3990                   time_max = None,
3991                   verbose = False):
3992
3993    """Read SWW file and extract time series for prescribed quantities at
3994    gauge locations.
3995
3996    The gauge locations are defined in gauge_filename. This file should be
3997    in the form: gaugename, easting, northing, and should be stored in a
3998    .csv or .xya file.
3999
4000    Time series data at the gauges can be written to file (for example,
4001    Benfield requested raw data) with the default being no data written.
4002
4003    The parameter quantity must be the name of an existing quantity or
4004    an expression involving existing quantities. The default is
4005    'depth'.
4006
    The user can define a list of quantities. The possibilities are
    the conserved quantities of the shallow water wave equation and other
    quantities which can be derived from those, i.e.
    ['depth', 'xmomentum', 'ymomentum', 'momentum', 'velocity', 'bearing'].
4011
4012    Momentum is the absolute momentum, sqrt(xmomentum^2 + ymomentum^2).
4013    Note, units of momentum are m^2/s and depth is m.
4014
4015    Velocity is absolute momentum divided by depth. (Confirming correct units:
4016    vel = abs mom / depth = (m^2/s)/m = m/s.)
4017
4018    Bearing returns the angle of the velocity vector from North.
4019
    If time_min and time_max are given, output is plotted in that time range.
    The default is to plot the entire time range evident in the sww file.
4022
    The exported graphic format is 'png' and files will be stored in the
    same directory as the input file.
4025    """
4026
4027    if quantity is None: quantity = 'depth'
4028
4029    # extract gauge locations from gauge file
4030
    # extract all quantities from swwfile (f = file_function)
    # FIXME: f is not defined yet; apply these defaults once the sww
    # file has actually been read:
    #if time_min is None: time_min = min(f.get_time())
    #if time_max is None: time_max = max(f.get_time())
4034
4035    # loop through appropriate range of time
4036    # plot prescribed quantities and export data if requested
4037
4038    #if gauge_data_outname is None:
4039
4040    return
4041
4042#-------------------------------------------------------------
4043if __name__ == "__main__":
4044    pass
4045