1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using pyvolution
48
49"""

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import csv
import os

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around


from coordinate_transforms.geo_reference import Geo_reference
from geospatial_data.geospatial_data import Geospatial_data


def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s

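# For example:
#
#   make_filename(' My model (v2) ')   #Returns 'My_model_v2'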

def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:' + os.sep

            print 'Using directory %s instead' %path

    return path


def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN

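# For example (illustrative; 'swwfiles' and 'run1' are hypothetical, and the
# trailing separator comes from check_dir):
#
#   create_filename('swwfiles', 'run1', 'sww', size=4, time=1.5)
#
# returns 'swwfiles/run1_size4_time1.50.sww' on unix.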

def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)  #Includes trailing separator

    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], \
               "Mode %s must be either 'w' (write), 'r' (read) or 'a' (append)" %mode

        #Create filename
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))


        self.filename = create_filename(domain.get_datadir(),
            domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain


        #FIXME: Should we have a general set_precision function?


#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #Maximum file size (2GB by default)
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()

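    # In summary, a newly created .sww file has this layout (as defined in
    # __init__ above):
    #
    #   dimensions: number_of_volumes, number_of_vertices (3),
    #               number_of_points, number_of_timesteps (unlimited)
    #   static variables: x, y, elevation (and z for backwards compatibility),
    #                     volumes (the connectivity)
    #   per-timestep variables: time, stage, xmomentum, ymomentum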

    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        minimum_allowed_depth = 0.001
        #minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase =  file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #I change the domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

            #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()


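# A minimal sketch of reading an .sww file back (assuming 'run1.sww' exists
# and was written by Data_format_sww above):
#
#   from Scientific.IO.NetCDF import NetCDFFile
#   fid = NetCDFFile('run1.sww', 'r')
#   last_stage = fid.variables['stage'][-1,:]  #Stage at the final timestep
#   fid.close()
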

#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity (volumes) of the
        triangles to file.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y (Z is not stored statically in the checkpoint file)
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        var = fid.variables[name]
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        var[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()




#Function for storing xya output
#FIXME Not done yet for this version
#This is obsolete.  Use geo_spatial_data instead
class Data_format_xya(Data_format):
    """Generic interface to data formats
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store_all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momenta) to file
        """
        pass

#### NBED national exposure database

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self,file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=False,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.

        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad, the
        info to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.
        """
        self._file_name = file_name
        self._geospatial = None

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
        self._load_exposure_csv(self._file_name)
        try:
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                 longitudes = longs)

        if is_x_y_locations:
            if self._geospatial is not None:
                pass #FIXME throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                msg = 'Could not find location columns %s and %s' \
                      %(x_title, y_title)
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result <> 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result <> 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic, \
                    other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result <> 0:
                    return result
            return 0
        else:
            return 1

    def _load_exposure_csv(self, file_name):
        """
        Load in the csv as a dic, title as key and column info as value.
        Also, create a dic, title as key and column index as value.
        """
        attribute_dic = {}
        title_index_dic = {}
        titles_stripped = [] # list of titles

        reader = csv.reader(file(file_name))

        # Read in and manipulate the title info
        titles = reader.next()
        for i,title in enumerate(titles):
            titles_stripped.append(title.strip())
            title_index_dic[title.strip()] = i
        title_count = len(titles_stripped)
        #print "title_index_dic",title_index_dic


        #create a dic of column values, indexed by column title
        for line in reader:
            if len(line) <> title_count:
                raise IOError #FIXME make this nicer
            for i, value in enumerate(line):
                attribute_dic.setdefault(titles_stripped[i],[]).append(value)

        return attribute_dic, title_index_dic

    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Thinking of semi-hard coding this to not be so..

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' %column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]

    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.

        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) <> value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' %column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name,'wb')
        writer = csv.writer(fd)

        #Write the title to a csv file
        line = [None]* len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]]= title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]* len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]]= \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

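# A minimal usage sketch (file and column names are hypothetical; 'depths'
# stands for a list of values, one per csv row):
#
#   ecsv = Exposure_csv('exposure.csv')
#   locations = ecsv.get_location()          #Geospatial_data object or None
#   walls = ecsv.get_column('WALL TYPE')     #List of strings
#   ecsv.set_column('MAX DEPTH', depths, overwrite=True)
#   ecsv.save('exposure_out.csv')
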

#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()

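# For example, a single flat triangle (illustrative only):
#
#   from Numeric import array, Float
#   x = array([[0.0, 1.0, 0.0]], Float)   #x coords of the three vertices
#   y = array([[0.0, 0.0, 1.0]], Float)
#   z = array([[0.0, 0.0, 0.0]], Float)
#   write_obj('single_triangle', x, y, z)  #Creates single_triangle.obj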

#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(filename, 'r').readlines()  #glob returns full paths
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Hmmmm'



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

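# For instance, to thin a large output file to every tenth timestep
# ('run1.sww' and 'run1_thin.sww' are hypothetical names):
#
#   filter_netcdf('run1.sww', 'run1_thin.sww', step=10)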

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

#FIXME move into geospatial.  There should only be one method that
# reads xya, and writes pts.
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get ASCII file
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)

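# A minimal sketch (assuming a file 'bathymetry.xya' in the format above),
# converting depths to elevations by negating the third field:
#
#   xya2pts('bathymetry', attribute_name='elevation',
#           z_func=lambda z: -z, verbose=True)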


######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result


def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose and nn > 0:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] <> NODATA_value:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()

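# Note on the grid arithmetic used above: row i=0 is the northernmost row,
# so its northing is y = (nrows-i-1)*cellsize + yllcorner, and column j has
# easting x = j*cellsize + xllcorner.  With the header example from dem2pts
# (nrows=1800, cellsize=25, yllcorner=5893000), row 0 lies at
# y = 1799*25 + 5893000 = 5937975.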


def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])




def hecras_cross_sections2pts(basename_in,
                              basename_out=None,
                              verbose=False):
    """Read HEC-RAS Elevation data from the following ASCII format (.sdf)

    Example:


# RAS export file created on Mon 15Aug2005 11:42
# by HEC-RAS Version 3.1.1

BEGIN HEADER:
  UNITS: METRIC
  DTM TYPE: TIN
  DTM: v:\1\cit\perth_topo\river_tin
  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
  MAP PROJECTION: UTM
  PROJECTION ZONE: 50
  DATUM: AGD66
  VERTICAL DATUM:
  NUMBER OF REACHES:  19
  NUMBER OF CROSS-SECTIONS:  14206
END HEADER:


Only the SURFACE LINE data of the following form will be utilised

  CROSS-SECTION:
    STREAM ID:Southern-Wungong
    REACH ID:Southern-Wungong
    STATION:19040.*
    CUT LINE:
      405548.671603161 , 6438142.7594925
      405734.536092045 , 6438326.10404912
      405745.130459356 , 6438331.48627354
      405813.89633823 , 6438368.6272789
    SURFACE LINE:
     405548.67,   6438142.76,   35.37
     405552.24,   6438146.28,   35.41
     405554.78,   6438148.78,   35.44
     405555.80,   6438149.79,   35.44
     405559.37,   6438153.31,   35.45
     405560.88,   6438154.81,   35.44
     405562.93,   6438156.83,   35.42
     405566.50,   6438160.35,   35.38
     405566.99,   6438160.83,   35.37
     ...
   END CROSS-SECTION

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get ASCII file
    infile = open(root + '.sdf', 'r')  #Open SDF file for read

    if verbose: print 'Reading HEC-RAS cross sections from %s' %(root + '.sdf')

    lines = infile.readlines()
    infile.close()

    if verbose: print 'Converting to pts format'

    i = 0
    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
        i += 1

    assert lines[i].strip().upper() == 'BEGIN HEADER:'
    i += 1

    assert lines[i].strip().upper().startswith('UNITS:')
    units = lines[i].strip().split()[1]
    i += 1

    assert lines[i].strip().upper().startswith('DTM TYPE:')
    i += 1

    assert lines[i].strip().upper().startswith('DTM:')
    i += 1

    assert lines[i].strip().upper().startswith('STREAM')
    i += 1

    assert lines[i].strip().upper().startswith('CROSS')
    i += 1

    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
    projection = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
    zone = int(lines[i].strip().split(':')[1])
    i += 1

    assert lines[i].strip().upper().startswith('DATUM:')
    datum = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
    number_of_cross_sections = int(lines[i].strip().split(':')[1])
    i += 1


    #Now read all points
    points = []
    elevation = []
    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
        for k, entry in enumerate(entries):
            points.append(entry[:2])
            elevation.append(entry[2])


    msg = 'Actual number of cross sections %d does not match reported number %d'\
          %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg

    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    #FIXME (DSG-ON): use loadASCII export_points_file
    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable ' +\
                          'storage of spatial point data derived from HEC-RAS'

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = 0.0
    outfile.yllcorner = 0.0
    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
    outfile.false_northing = 1000000

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Store data
    outfile.variables['points'][:] = points
    outfile.variables['elevation'][:] = elevation

    outfile.close()

1742
1743
1744def sww2dem(basename_in, basename_out = None,
1745            quantity = None,
1746            timestep = None,
1747            reduction = None,
1748            cellsize = 10,
1749            NODATA_value = -9999,
1750            easting_min = None,
1751            easting_max = None,
1752            northing_min = None,
1753            northing_max = None,
1754            verbose = False,
1755            origin = None,
1756            datum = 'WGS84',
            format = 'ers'):
1758
1759    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1760
1761    Example (ASC):
1762
1763    ncols         3121
1764    nrows         1800
1765    xllcorner     722000
1766    yllcorner     5893000
1767    cellsize      25
1768    NODATA_value  -9999
1769    138.3698 137.4194 136.5062 135.5558 ..........
1770
    Also write an accompanying file with same basename but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.
1773
1774    The prj format is assumed to be as
1775
1776    Projection    UTM
1777    Zone          56
1778    Datum         WGS84
1779    Zunits        NO
1780    Units         METERS
1781    Spheroid      WGS84
1782    Xshift        0.0000000000
1783    Yshift        10000000.0000000000
1784    Parameters
1785
1786
1787    The parameter quantity must be the name of an existing quantity or
1788    an expression involving existing quantities. The default is
1789    'elevation'.
1790
    If timestep (an index) is given, output quantity at that timestep.

    If reduction is given, use it to reduce quantity over all timesteps.

    datum sets the datum recorded in the output file (default 'WGS84').

    format can be either 'asc' or 'ers'.
1798    """
1799
1800    import sys
1801    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1802    from Numeric import array2string
1803
1804    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1805    from util import apply_expression_to_dictionary
1806
1807    msg = 'Format must be either asc or ers'
1808    assert format.lower() in ['asc', 'ers'], msg
1809
1810
1811    false_easting = 500000
1812    false_northing = 10000000
1813
1814    if quantity is None:
1815        quantity = 'elevation'
1816
1817    if reduction is None:
1818        reduction = max
1819
1820    if basename_out is None:
1821        basename_out = basename_in + '_%s' %quantity
1822
1823    swwfile = basename_in + '.sww'
1824    demfile = basename_out + '.' + format
    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option)
1827
1828    #Read sww file
1829    if verbose: print 'Reading from %s' %swwfile
1830    from Scientific.IO.NetCDF import NetCDFFile
1831    fid = NetCDFFile(swwfile)
1832
1833    #Get extent and reference
1834    x = fid.variables['x'][:]
1835    y = fid.variables['y'][:]
1836    volumes = fid.variables['volumes'][:]
1837
1838    number_of_timesteps = fid.dimensions['number_of_timesteps']
1839    number_of_points = fid.dimensions['number_of_points']
1840    if origin is None:
1841
1842        #Get geo_reference
1843        #sww files don't have to have a geo_ref
1844        try:
1845            geo_reference = Geo_reference(NetCDFObject=fid)
1846        except AttributeError, e:
1847            geo_reference = Geo_reference() #Default georef object
1848
1849        xllcorner = geo_reference.get_xllcorner()
1850        yllcorner = geo_reference.get_yllcorner()
1851        zone = geo_reference.get_zone()
1852    else:
1853        zone = origin[0]
1854        xllcorner = origin[1]
1855        yllcorner = origin[2]
1856
1857
1858
1859    #FIXME: Refactor using code from file_function.statistics
1860    #Something like print swwstats(swwname)
1861    if verbose:
1862        x = fid.variables['x'][:]
1863        y = fid.variables['y'][:]
1864        times = fid.variables['time'][:]
1865        print '------------------------------------------------'
1866        print 'Statistics of SWW file:'
1867        print '  Name: %s' %swwfile
1868        print '  Reference:'
1869        print '    Lower left corner: [%f, %f]'\
1870              %(xllcorner, yllcorner)
1871        print '    Start time: %f' %fid.starttime[0]
1872        print '  Extent:'
1873        print '    x [m] in [%f, %f], len(x) == %d'\
1874              %(min(x.flat), max(x.flat), len(x.flat))
1875        print '    y [m] in [%f, %f], len(y) == %d'\
1876              %(min(y.flat), max(y.flat), len(y.flat))
1877        print '    t [s] in [%f, %f], len(t) == %d'\
1878              %(min(times), max(times), len(times))
1879        print '  Quantities [SI units]:'
1880        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
1881            q = fid.variables[name][:].flat
1882            print '    %s in [%f, %f]' %(name, min(q), max(q))
1883
1884
1885
1886
1887
1888    # Get quantity and reduce if applicable
1889    if verbose: print 'Processing quantity %s' %quantity
1890
1891    # Turn NetCDF objects into Numeric arrays
1892    quantity_dict = {}
1893    for name in fid.variables.keys():
1894        quantity_dict[name] = fid.variables[name][:]
1895
1896
1897
1898    # Convert quantity expression to quantities found in sww file   
1899    q = apply_expression_to_dictionary(quantity, quantity_dict)
1900
1901
1902
1903    if len(q.shape) == 2:
1904        #q has a time component and needs to be reduced along
1905        #the temporal dimension
1906        if verbose: print 'Reducing quantity %s' %quantity
1907        q_reduced = zeros( number_of_points, Float )
1908
1909        for k in range(number_of_points):
1910            q_reduced[k] = reduction( q[:,k] )
1911
1912        q = q_reduced
1913
1914    #Post condition: Now q has dimension: number_of_points
1915    assert len(q.shape) == 1
1916    assert q.shape[0] == number_of_points
1917
1918
1919    if verbose:
1920        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1921
1922
1923    #Create grid and update xll/yll corner and x,y
1924
1925    #Relative extent
1926    if easting_min is None:
1927        xmin = min(x)
1928    else:
1929        xmin = easting_min - xllcorner
1930
1931    if easting_max is None:
1932        xmax = max(x)
1933    else:
1934        xmax = easting_max - xllcorner
1935
1936    if northing_min is None:
1937        ymin = min(y)
1938    else:
1939        ymin = northing_min - yllcorner
1940
1941    if northing_max is None:
1942        ymax = max(y)
1943    else:
1944        ymax = northing_max - yllcorner
1945
1946
1947
1948    if verbose: print 'Creating grid'
1949    ncols = int((xmax-xmin)/cellsize)+1
1950    nrows = int((ymax-ymin)/cellsize)+1
1951
1952
1953    #New absolute reference and coordinates
1954    newxllcorner = xmin+xllcorner
1955    newyllcorner = ymin+yllcorner
1956
1957    x = x+xllcorner-newxllcorner
1958    y = y+yllcorner-newyllcorner
1959
1960    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1961    assert len(vertex_points.shape) == 2
1962
1963
1964
1965    grid_points = zeros ( (ncols*nrows, 2), Float )
1966
1967
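    # Lay out the grid in row-major order: point k = i*ncols + j is placed at
    # (j*cellsize, yg). For 'asc' output y increases with the row index i,
    # while for 'ers' it is flipped so that rows run from north to south.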
1968    for i in xrange(nrows):
1969        if format.lower() == 'asc':
1970            yg = i*cellsize
        else:
            #this will flip the order of the y values for ers
            yg = (nrows-i)*cellsize
1974
1975        for j in xrange(ncols):
1976            xg = j*cellsize
1977            k = i*ncols + j
1978
1979            grid_points[k,0] = xg
1980            grid_points[k,1] = yg
1981
1982    #Interpolate
1983    #from least_squares import Interpolation
1984    from fit_interpolate.interpolate import Interpolate
1985
1986
1987    interp = Interpolate(vertex_points, volumes, verbose = verbose)
1988
1989
1990
1991    #Interpolate using quantity values
1992    if verbose: print 'Interpolating'
1993    grid_values = interp.interpolate(q, grid_points).flat
1994
1995
1996    if verbose:
1997        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
1998                                                      max(grid_values))
1999
2000    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2001    P = interp.mesh.get_boundary_polygon()
2002    outside_indices = outside_polygon(grid_points, P, closed=True)
2003
2004    for i in outside_indices:
2005        grid_values[i] = NODATA_value
2006
2007
2008
2009
2010    if format.lower() == 'ers':
2011        # setup ERS header information
2012        grid_values = reshape(grid_values,(nrows, ncols))
2013        header = {}
2014        header['datum'] = '"' + datum + '"'
2015        # FIXME The use of hardwired UTM and zone number needs to be made optional
2016        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2017        header['projection'] = '"UTM-' + str(zone) + '"'
2018        header['coordinatetype'] = 'EN'
2019        if header['coordinatetype'] == 'LL':
2020            header['longitude'] = str(newxllcorner)
2021            header['latitude'] = str(newyllcorner)
2022        elif header['coordinatetype'] == 'EN':
2023            header['eastings'] = str(newxllcorner)
2024            header['northings'] = str(newyllcorner)
2025        header['nullcellvalue'] = str(NODATA_value)
2026        header['xdimension'] = str(cellsize)
2027        header['ydimension'] = str(cellsize)
2028        header['value'] = '"' + quantity + '"'
2029        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2030
2031
2032        #Write
2033        if verbose: print 'Writing %s' %demfile
2034        import ermapper_grids
2035        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2036
2037        fid.close()
2038    else:
2039        #Write to Ascii format
2040
2041        #Write prj file
2042        prjfile = basename_out + '.prj'
2043
2044        if verbose: print 'Writing %s' %prjfile
2045        prjid = open(prjfile, 'w')
2046        prjid.write('Projection    %s\n' %'UTM')
2047        prjid.write('Zone          %d\n' %zone)
2048        prjid.write('Datum         %s\n' %datum)
2049        prjid.write('Zunits        NO\n')
2050        prjid.write('Units         METERS\n')
2051        prjid.write('Spheroid      %s\n' %datum)
2052        prjid.write('Xshift        %d\n' %false_easting)
2053        prjid.write('Yshift        %d\n' %false_northing)
2054        prjid.write('Parameters\n')
2055        prjid.close()
2056
2057
2058
2059        if verbose: print 'Writing %s' %demfile
2060
2061        ascid = open(demfile, 'w')
2062
2063        ascid.write('ncols         %d\n' %ncols)
2064        ascid.write('nrows         %d\n' %nrows)
2065        ascid.write('xllcorner     %d\n' %newxllcorner)
2066        ascid.write('yllcorner     %d\n' %newyllcorner)
2067        ascid.write('cellsize      %f\n' %cellsize)
2068        ascid.write('NODATA_value  %d\n' %NODATA_value)
2069
2070
2071        #Get bounding polygon from mesh
2072        #P = interp.mesh.get_boundary_polygon()
2073        #inside_indices = inside_polygon(grid_points, P)
2074
2075        for i in range(nrows):
2076            if verbose and i%((nrows+10)/10)==0:
2077                print 'Doing row %d of %d' %(i, nrows)
2078
2079            base_index = (nrows-i-1)*ncols
2080
            row_values = grid_values[base_index:base_index+ncols]
            s = array2string(row_values, max_line_width=sys.maxint)
2083            ascid.write(s[1:-1] + '\n')
2084
2085
2086            #print
2087            #for j in range(ncols):
2088            #    index = base_index+j#
2089            #    print grid_values[index],
2090            #    ascid.write('%f ' %grid_values[index])
2091            #ascid.write('\n')
2092
2093        #Close
2094        ascid.close()
2095        fid.close()
2096
2097#Backwards compatibility
2098def sww2asc(basename_in, basename_out = None,
2099            quantity = None,
2100            timestep = None,
2101            reduction = None,
2102            cellsize = 10,
2103            verbose = False,
2104            origin = None):
    print 'sww2asc will soon be obsolete - please use sww2dem'
2106    sww2dem(basename_in,
2107            basename_out = basename_out,
2108            quantity = quantity,
2109            timestep = timestep,
2110            reduction = reduction,
2111            cellsize = cellsize,
2112            verbose = verbose,
2113            origin = origin,
            datum = 'WGS84',
            format = 'asc')
2116
2117def sww2ers(basename_in, basename_out = None,
2118            quantity = None,
2119            timestep = None,
2120            reduction = None,
2121            cellsize = 10,
2122            verbose = False,
2123            origin = None,
2124            datum = 'WGS84'):
    print 'sww2ers will soon be obsolete - please use sww2dem'
2126    sww2dem(basename_in,
2127            basename_out = basename_out,
2128            quantity = quantity,
2129            timestep = timestep,
2130            reduction = reduction,
2131            cellsize = cellsize,
2132            verbose = verbose,
2133            origin = origin,
2134            datum = datum,
2135            format = 'ers')
2136################################# END COMPATIBILITY ##############
2137
2138
2139
2140def sww2pts(basename_in, basename_out=None,
2141            data_points=None,
2142            quantity=None,
2143            timestep=None,
2144            reduction=None,
2145            NODATA_value=-9999,
2146            verbose=False,
2147            origin=None):
2148            #datum = 'WGS84')
2149
2150
2151    """Read SWW file and convert to interpolated values at selected points
2152
2153    The parameter quantity' must be the name of an existing quantity or
2154    an expression involving existing quantities. The default is
2155    'elevation'.
2156
2157    if timestep (an index) is given, output quantity at that timestep
2158
2159    if reduction is given use that to reduce quantity over all timesteps.
2160
2161    data_points (Nx2 array) give locations of points where quantity is to be computed.
2162   
2163    """
2164
2165    import sys
2166    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2167    from Numeric import array2string
2168
2169    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2170    from util import apply_expression_to_dictionary
2171
2172    from geospatial_data import Geospatial_data
2173
2174    if quantity is None:
2175        quantity = 'elevation'
2176
2177    if reduction is None:
2178        reduction = max
2179
2180    if basename_out is None:
2181        basename_out = basename_in + '_%s' %quantity
2182
2183    swwfile = basename_in + '.sww'
2184    ptsfile = basename_out + '.pts'
2185
2186    # Read sww file
2187    if verbose: print 'Reading from %s' %swwfile
2188    from Scientific.IO.NetCDF import NetCDFFile
2189    fid = NetCDFFile(swwfile)
2190
2191    # Get extent and reference
2192    x = fid.variables['x'][:]
2193    y = fid.variables['y'][:]
2194    volumes = fid.variables['volumes'][:]
2195
2196    number_of_timesteps = fid.dimensions['number_of_timesteps']
2197    number_of_points = fid.dimensions['number_of_points']
2198    if origin is None:
2199
2200        # Get geo_reference
2201        # sww files don't have to have a geo_ref
2202        try:
2203            geo_reference = Geo_reference(NetCDFObject=fid)
2204        except AttributeError, e:
2205            geo_reference = Geo_reference() #Default georef object
2206
2207        xllcorner = geo_reference.get_xllcorner()
2208        yllcorner = geo_reference.get_yllcorner()
2209        zone = geo_reference.get_zone()
2210    else:
2211        zone = origin[0]
2212        xllcorner = origin[1]
2213        yllcorner = origin[2]
2214
2215
2216
2217    # FIXME: Refactor using code from file_function.statistics
2218    # Something like print swwstats(swwname)
2219    if verbose:
2220        x = fid.variables['x'][:]
2221        y = fid.variables['y'][:]
2222        times = fid.variables['time'][:]
2223        print '------------------------------------------------'
2224        print 'Statistics of SWW file:'
2225        print '  Name: %s' %swwfile
2226        print '  Reference:'
2227        print '    Lower left corner: [%f, %f]'\
2228              %(xllcorner, yllcorner)
2229        print '    Start time: %f' %fid.starttime[0]
2230        print '  Extent:'
2231        print '    x [m] in [%f, %f], len(x) == %d'\
2232              %(min(x.flat), max(x.flat), len(x.flat))
2233        print '    y [m] in [%f, %f], len(y) == %d'\
2234              %(min(y.flat), max(y.flat), len(y.flat))
2235        print '    t [s] in [%f, %f], len(t) == %d'\
2236              %(min(times), max(times), len(times))
2237        print '  Quantities [SI units]:'
2238        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2239            q = fid.variables[name][:].flat
2240            print '    %s in [%f, %f]' %(name, min(q), max(q))
2241
2242
2243
2244    # Get quantity and reduce if applicable
2245    if verbose: print 'Processing quantity %s' %quantity
2246
2247    # Turn NetCDF objects into Numeric arrays
2248    quantity_dict = {}
2249    for name in fid.variables.keys():
2250        quantity_dict[name] = fid.variables[name][:]
2251
2252
2253
2254    # Convert quantity expression to quantities found in sww file   
2255    q = apply_expression_to_dictionary(quantity, quantity_dict)
2256
2257
2258
2259    if len(q.shape) == 2:
2260        # q has a time component and needs to be reduced along
2261        # the temporal dimension
2262        if verbose: print 'Reducing quantity %s' %quantity
2263        q_reduced = zeros( number_of_points, Float )
2264
2265        for k in range(number_of_points):
2266            q_reduced[k] = reduction( q[:,k] )
2267
2268        q = q_reduced
2269
2270    # Post condition: Now q has dimension: number_of_points
2271    assert len(q.shape) == 1
2272    assert q.shape[0] == number_of_points
2273
2274
2275    if verbose:
2276        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2277
2278
2279    # Create grid and update xll/yll corner and x,y
2280    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2281    assert len(vertex_points.shape) == 2
2282
2283    # Interpolate
2284    from fit_interpolate.interpolate import Interpolate
2285    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2286
2287    # Interpolate using quantity values
2288    if verbose: print 'Interpolating'
2289    interpolated_values = interp.interpolate(q, data_points).flat
2290
2291
2292    if verbose:
2293        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2294                                                      max(interpolated_values))
2295
2296    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2297    P = interp.mesh.get_boundary_polygon()
2298    outside_indices = outside_polygon(data_points, P, closed=True)
2299
2300    for i in outside_indices:
2301        interpolated_values[i] = NODATA_value
2302
2303
2304    # Store results   
2305    G = Geospatial_data(data_points=data_points,
2306                        attributes=interpolated_values)
2307
2308    G.export_points_file(ptsfile, absolute = True)
2309
2310    fid.close()
2311
2312
2313def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2314                                  use_cache = False,
2315                                  verbose = False):
2316    """Read Digitial Elevation model from the following ASCII format (.asc)
2317
2318    Example:
2319
2320    ncols         3121
2321    nrows         1800
2322    xllcorner     722000
2323    yllcorner     5893000
2324    cellsize      25
2325    NODATA_value  -9999
2326    138.3698 137.4194 136.5062 135.5558 ..........
2327
2328    Convert basename_in + '.asc' to NetCDF format (.dem)
2329    mimicking the ASCII format closely.
2330
2331
2332    An accompanying file with same basename_in but extension .prj must exist
2333    and is used to fix the UTM zone, datum, false northings and eastings.
2334
2335    The prj format is assumed to be as
2336
2337    Projection    UTM
2338    Zone          56
2339    Datum         WGS84
2340    Zunits        NO
2341    Units         METERS
2342    Spheroid      WGS84
2343    Xshift        0.0000000000
2344    Yshift        10000000.0000000000
2345    Parameters
2346    """
2347
2348
2349
2350    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2351
2352    if use_cache is True:
2353        from caching import cache
2354        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2355                       dependencies = [basename_in + '.asc'],
2356                       verbose = verbose)
2357
2358    else:
2359        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2360
2361    return result
2362
2363
2364
2365
2366
2367
2368def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2369                                  verbose = False):
2370    """Read Digitial Elevation model from the following ASCII format (.asc)
2371
2372    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2373    """
2374
2375    import os
2376    from Scientific.IO.NetCDF import NetCDFFile
2377    from Numeric import Float, array
2378
2379    #root, ext = os.path.splitext(basename_in)
2380    root = basename_in
2381
2382    ###########################################
2383    # Read Meta data
    if verbose: print 'Reading METADATA from %s' %(root + '.prj')
2385    metadatafile = open(root + '.prj')
2386    metalines = metadatafile.readlines()
2387    metadatafile.close()
2388
2389    L = metalines[0].strip().split()
2390    assert L[0].strip().lower() == 'projection'
2391    projection = L[1].strip()                   #TEXT
2392
2393    L = metalines[1].strip().split()
2394    assert L[0].strip().lower() == 'zone'
2395    zone = int(L[1].strip())
2396
2397    L = metalines[2].strip().split()
2398    assert L[0].strip().lower() == 'datum'
2399    datum = L[1].strip()                        #TEXT
2400
2401    L = metalines[3].strip().split()
2402    assert L[0].strip().lower() == 'zunits'     #IGNORE
2403    zunits = L[1].strip()                       #TEXT
2404
2405    L = metalines[4].strip().split()
2406    assert L[0].strip().lower() == 'units'
2407    units = L[1].strip()                        #TEXT
2408
2409    L = metalines[5].strip().split()
2410    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2411    spheroid = L[1].strip()                     #TEXT
2412
2413    L = metalines[6].strip().split()
2414    assert L[0].strip().lower() == 'xshift'
2415    false_easting = float(L[1].strip())
2416
2417    L = metalines[7].strip().split()
2418    assert L[0].strip().lower() == 'yshift'
2419    false_northing = float(L[1].strip())
2420
2421    #print false_easting, false_northing, zone, datum
2422
2423
2424    ###########################################
2425    #Read DEM data
2426
2427    datafile = open(basename_in + '.asc')
2428
2429    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2430    lines = datafile.readlines()
2431    datafile.close()
2432
2433    if verbose: print 'Got', len(lines), ' lines'
2434
2435    ncols = int(lines[0].split()[1].strip())
2436    nrows = int(lines[1].split()[1].strip())
2437    xllcorner = float(lines[2].split()[1].strip())
2438    yllcorner = float(lines[3].split()[1].strip())
2439    cellsize = float(lines[4].split()[1].strip())
2440    NODATA_value = int(lines[5].split()[1].strip())
2441
2442    assert len(lines) == nrows + 6
2443
2444
2445    ##########################################
2446
2447
    if basename_out is None:
2449        netcdfname = root + '.dem'
2450    else:
2451        netcdfname = basename_out + '.dem'
2452
2453    if verbose: print 'Store to NetCDF file %s' %netcdfname
2454    # NetCDF file definition
2455    fid = NetCDFFile(netcdfname, 'w')
2456
2457    #Create new file
2458    fid.institution = 'Geoscience Australia'
2459    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2460                      'of spatial point data'
2461
2462    fid.ncols = ncols
2463    fid.nrows = nrows
2464    fid.xllcorner = xllcorner
2465    fid.yllcorner = yllcorner
2466    fid.cellsize = cellsize
2467    fid.NODATA_value = NODATA_value
2468
2469    fid.zone = zone
2470    fid.false_easting = false_easting
2471    fid.false_northing = false_northing
2472    fid.projection = projection
2473    fid.datum = datum
2474    fid.units = units
2475
2476
2477    # dimension definitions
2478    fid.createDimension('number_of_rows', nrows)
2479    fid.createDimension('number_of_columns', ncols)
2480
2481    # variable definitions
2482    fid.createVariable('elevation', Float, ('number_of_rows',
2483                                            'number_of_columns'))
2484
2485    # Get handles to the variables
2486    elevation = fid.variables['elevation']
2487
2488    #Store data
2489    n = len(lines[6:])
2490    for i, line in enumerate(lines[6:]):
2491        fields = line.split()
2492        if verbose and i%((n+10)/10)==0:
2493            print 'Processing row %d of %d' %(i, nrows)
2494
2495        elevation[i, :] = array([float(x) for x in fields])
2496
2497    fid.close()
2498
2499
2500
2501
2502
2503def ferret2sww(basename_in, basename_out = None,
2504               verbose = False,
2505               minlat = None, maxlat = None,
2506               minlon = None, maxlon = None,
2507               mint = None, maxt = None, mean_stage = 0,
2508               origin = None, zscale = 1,
2509               fail_on_NaN = True,
2510               NaN_filler = 0,
2511               elevation = None,
2512               inverted_bathymetry = False
2513               ): #FIXME: Bathymetry should be obtained
2514                                  #from MOST somehow.
2515                                  #Alternatively from elsewhere
2516                                  #or, as a last resort,
2517                                  #specified here.
2518                                  #The value of -100 will work
2519                                  #for the Wollongong tsunami
2520                                  #scenario but is very hacky
2521    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2522    sww format native to pyvolution.
2523
2524    Specify only basename_in and read files of the form
2525    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2526    relative height, x-velocity and y-velocity, respectively.
2527
2528    Also convert latitude and longitude to UTM. All coordinates are
2529    assumed to be given in the GDA94 datum.
2530
    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
2534
2535    origin is a 3-tuple with geo referenced
2536    UTM coordinates (zone, easting, northing)
2537
2538    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2539    which means that longitude is the fastest
2540    varying dimension (row major order, so to speak)
2541
2542    ferret2sww uses grid points as vertices in a triangular grid
2543    counting vertices from lower left corner upwards, then right
2544    """
2545
2546    import os
2547    from Scientific.IO.NetCDF import NetCDFFile
2548    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2549    from Numeric import allclose, around
2550
2551    precision = Float
2552
2553    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2554
    if minlat is not None:
        assert -90 < minlat < 90 , msg
    if maxlat is not None:
        assert -90 < maxlat < 90 , msg
    if minlon is not None:
        assert -180 < minlon < 180 , msg
    if maxlon is not None:
        assert -180 < maxlon < 180 , msg
2563
2564
2565    #Get NetCDF data
2566    if verbose: print 'Reading files %s_*.nc' %basename_in
2567    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2568    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2569    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2570    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2571
2572    if basename_out is None:
2573        swwname = basename_in + '.sww'
2574    else:
2575        swwname = basename_out + '.sww'
2576
    # Get dimensions of file_h
2578    for dimension in file_h.dimensions.keys():
2579        if dimension[:3] == 'LON':
2580            dim_h_longitude = dimension
2581        if dimension[:3] == 'LAT':
2582            dim_h_latitude = dimension
2583        if dimension[:4] == 'TIME':
2584            dim_h_time = dimension
2585
2589
2590    times = file_h.variables[dim_h_time]
2591    latitudes = file_h.variables[dim_h_latitude]
2592    longitudes = file_h.variables[dim_h_longitude]
2593   
    # Get dimensions for file_e
2595    for dimension in file_e.dimensions.keys():
2596        if dimension[:3] == 'LON':
2597            dim_e_longitude = dimension
2598        if dimension[:3] == 'LAT':
2599            dim_e_latitude = dimension
2600        if dimension[:4] == 'TIME':
2601            dim_e_time = dimension
2602
    # Get dimensions for file_u
2604    for dimension in file_u.dimensions.keys():
2605        if dimension[:3] == 'LON':
2606            dim_u_longitude = dimension
2607        if dimension[:3] == 'LAT':
2608            dim_u_latitude = dimension
2609        if dimension[:4] == 'TIME':
2610            dim_u_time = dimension
2611           
    # Get dimensions for file_v
2613    for dimension in file_v.dimensions.keys():
2614        if dimension[:3] == 'LON':
2615            dim_v_longitude = dimension
2616        if dimension[:3] == 'LAT':
2617            dim_v_latitude = dimension
2618        if dimension[:4] == 'TIME':
2619            dim_v_time = dimension
2620
2621
    #Precision used by MOST for lat/lon is 4 or 5 decimals
2623    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2624    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2625
2626    #Check that files are compatible
2627    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2628    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2629    assert allclose(latitudes, e_lat)
2630
2631    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2632    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2633    assert allclose(longitudes, e_lon)
2634
    if mint is None:
        jmin = 0
    else:
        jmin = searchsorted(times, mint)

    if maxt is None:
        jmax = len(times)
    else:
        jmax = searchsorted(times, maxt)

    if minlat is None:
        kmin = 0
    else:
        kmin = searchsorted(latitudes, minlat)

    if maxlat is None:
        kmax = len(latitudes)
    else:
        kmax = searchsorted(latitudes, maxlat)

    if minlon is None:
        lmin = 0
    else:
        lmin = searchsorted(longitudes, minlon)

    if maxlon is None:
        lmax = len(longitudes)
    else:
        lmax = searchsorted(longitudes, maxlon)
2664
2666    times = times[jmin:jmax]
2667    latitudes = latitudes[kmin:kmax]
2668    longitudes = longitudes[lmin:lmax]
2669
2670
2671    if verbose: print 'cropping'
2672    zname = 'ELEVATION'
2673
2674    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2675    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2676    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2677    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2678
2679    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2680    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2681    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2682    #        from Numeric import asarray
2683    #        elevations=elevations.tolist()
2684    #        elevations.reverse()
2685    #        elevations=asarray(elevations)
2686    #    else:
2687    #        from Numeric import asarray
2688    #        elevations=elevations.tolist()
2689    #        elevations.reverse()
2690    #        elevations=asarray(elevations)
2691    #        'print hmmm'
2692
2693
2694
2695    #Get missing values
2696    nan_ha = file_h.variables['HA'].missing_value[0]
2697    nan_ua = file_u.variables['UA'].missing_value[0]
2698    nan_va = file_v.variables['VA'].missing_value[0]
2699    if hasattr(file_e.variables[zname],'missing_value'):
2700        nan_e  = file_e.variables[zname].missing_value[0]
2701    else:
2702        nan_e = None
2703
2704    #Cleanup
2705    from Numeric import sometrue
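    # Masking arithmetic used below: (missing==0) zeroes out the flagged
    # entries and missing*NaN_filler substitutes the filler in their place.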
2706
2707    missing = (amplitudes == nan_ha)
2708    if sometrue (missing):
2709        if fail_on_NaN:
2710            msg = 'NetCDFFile %s contains missing values'\
2711                  %(basename_in+'_ha.nc')
2712            raise DataMissingValuesError, msg
2713        else:
2714            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2715
2716    missing = (uspeed == nan_ua)
2717    if sometrue (missing):
2718        if fail_on_NaN:
2719            msg = 'NetCDFFile %s contains missing values'\
2720                  %(basename_in+'_ua.nc')
2721            raise DataMissingValuesError, msg
2722        else:
2723            uspeed = uspeed*(missing==0) + missing*NaN_filler
2724
2725    missing = (vspeed == nan_va)
2726    if sometrue (missing):
2727        if fail_on_NaN:
2728            msg = 'NetCDFFile %s contains missing values'\
2729                  %(basename_in+'_va.nc')
2730            raise DataMissingValuesError, msg
2731        else:
2732            vspeed = vspeed*(missing==0) + missing*NaN_filler
2733
2734
    #nan_e may be None if the elevation file defines no missing_value
    if nan_e is not None:
        missing = (elevations == nan_e)
        if sometrue (missing):
            if fail_on_NaN:
                msg = 'NetCDFFile %s contains missing values'\
                      %(basename_in+'_e.nc')
                raise DataMissingValuesError, msg
            else:
                elevations = elevations*(missing==0) + missing*NaN_filler
2743
2744    #######
2745
2746
2747
2748    number_of_times = times.shape[0]
2749    number_of_latitudes = latitudes.shape[0]
2750    number_of_longitudes = longitudes.shape[0]
2751
2752    assert amplitudes.shape[0] == number_of_times
2753    assert amplitudes.shape[1] == number_of_latitudes
2754    assert amplitudes.shape[2] == number_of_longitudes
2755
2756    if verbose:
2757        print '------------------------------------------------'
2758        print 'Statistics:'
2759        print '  Extent (lat/lon):'
2760        print '    lat in [%f, %f], len(lat) == %d'\
2761              %(min(latitudes.flat), max(latitudes.flat),
2762                len(latitudes.flat))
2763        print '    lon in [%f, %f], len(lon) == %d'\
2764              %(min(longitudes.flat), max(longitudes.flat),
2765                len(longitudes.flat))
2766        print '    t in [%f, %f], len(t) == %d'\
2767              %(min(times.flat), max(times.flat), len(times.flat))
2768
2769        q = amplitudes.flat
2770        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2772
2773        q = uspeed.flat
2774        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2776
2777        q = vspeed.flat
2778        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2780
2781        q = elevations.flat
2782        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2784
2785
2786    #print number_of_latitudes, number_of_longitudes
2787    number_of_points = number_of_latitudes*number_of_longitudes
2788    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2789
2790
2791    file_h.close()
2792    file_u.close()
2793    file_v.close()
2794    file_e.close()
2795
2796
2797    # NetCDF file definition
2798    outfile = NetCDFFile(swwname, 'w')
2799
2800    #Create new file
2801    outfile.institution = 'Geoscience Australia'
2802    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2803                          %(basename_in + '_ha.nc',
2804                            basename_in + '_ua.nc',
2805                            basename_in + '_va.nc',
2806                            basename_in + '_e.nc')
2807
2808
2809    #For sww compatibility
2810    outfile.smoothing = 'Yes'
2811    outfile.order = 1
2812
2813    #Start time in seconds since the epoch (midnight 1/1/1970)
2814    outfile.starttime = starttime = times[0]
2815    times = times - starttime  #Store relative times
2816
2817    # dimension definitions
2818    outfile.createDimension('number_of_volumes', number_of_volumes)
2819
2820    outfile.createDimension('number_of_vertices', 3)
2821    outfile.createDimension('number_of_points', number_of_points)
2822
2823
2825    outfile.createDimension('number_of_timesteps', len(times))
2826
2827    # variable definitions
2828    outfile.createVariable('x', precision, ('number_of_points',))
2829    outfile.createVariable('y', precision, ('number_of_points',))
2830    outfile.createVariable('elevation', precision, ('number_of_points',))
2831
2832    #FIXME: Backwards compatibility
2833    outfile.createVariable('z', precision, ('number_of_points',))
2834    #################################
2835
2836    outfile.createVariable('volumes', Int, ('number_of_volumes',
2837                                            'number_of_vertices'))
2838
2839    outfile.createVariable('time', precision,
2840                           ('number_of_timesteps',))
2841
2842    outfile.createVariable('stage', precision,
2843                           ('number_of_timesteps',
2844                            'number_of_points'))
2845
2846    outfile.createVariable('xmomentum', precision,
2847                           ('number_of_timesteps',
2848                            'number_of_points'))
2849
2850    outfile.createVariable('ymomentum', precision,
2851                           ('number_of_timesteps',
2852                            'number_of_points'))
2853
2854
2855    #Store
2856    from coordinate_transforms.redfearn import redfearn
2857    x = zeros(number_of_points, Float)  #Easting
2858    y = zeros(number_of_points, Float)  #Northing
2859
2860
2861    if verbose: print 'Making triangular grid'
2862    #Check zone boundaries
2863    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2864
2865    vertices = {}
2866    i = 0
2867    for k, lat in enumerate(latitudes):       #Y direction
2868        for l, lon in enumerate(longitudes):  #X direction
2869
2870            vertices[l,k] = i
2871
2872            zone, easting, northing = redfearn(lat,lon)
2873
            msg = 'Zone boundary crossed at longitude = %f' %lon
2875            #assert zone == refzone, msg
2876            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2877            x[i] = easting
2878            y[i] = northing
2879            i += 1
2880
2881
2882    #Construct 2 triangles per 'rectangular' element
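    # Each rectangular cell is split along the diagonal from its lower left
    # vertex v2 to its upper right vertex v3:
    #
    #   v1 -------- v3
    #    |  upper / |
    #    |      /   |
    #    |    /     |
    #    |  / lower |
    #   v2 -------- v4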
2883    volumes = []
2884    for l in range(number_of_longitudes-1):    #X direction
2885        for k in range(number_of_latitudes-1): #Y direction
2886            v1 = vertices[l,k+1]
2887            v2 = vertices[l,k]
2888            v3 = vertices[l+1,k+1]
2889            v4 = vertices[l+1,k]
2890
2891            volumes.append([v1,v2,v3]) #Upper element
2892            volumes.append([v4,v3,v2]) #Lower element
2893
2894    volumes = array(volumes)
2895
    if origin is None:
2897        zone = refzone
2898        xllcorner = min(x)
2899        yllcorner = min(y)
2900    else:
2901        zone = origin[0]
2902        xllcorner = origin[1]
2903        yllcorner = origin[2]
2904
2905
2906    outfile.xllcorner = xllcorner
2907    outfile.yllcorner = yllcorner
2908    outfile.zone = zone
2909
2910
2911    if elevation is not None:
2912        z = elevation
2913    else:
2914        if inverted_bathymetry:
2915            z = -1*elevations
2916        else:
2917            z = elevations
2918    #FIXME: z should be obtained from MOST and passed in here
2919
2920    from Numeric import resize
2921    z = resize(z,outfile.variables['z'][:].shape)
2922    outfile.variables['x'][:] = x - xllcorner
2923    outfile.variables['y'][:] = y - yllcorner
2924    outfile.variables['z'][:] = z             #FIXME HACK
2925    outfile.variables['elevation'][:] = z
2926    outfile.variables['time'][:] = times   #Store time relative
2927    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2928
2929
2930
2931    #Time stepping
2932    stage = outfile.variables['stage']
2933    xmomentum = outfile.variables['xmomentum']
2934    ymomentum = outfile.variables['ymomentum']
2935
2936    if verbose: print 'Converting quantities'
2937    n = len(times)
2938    for j in range(n):
2939        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2940        i = 0
2941        for k in range(number_of_latitudes):      #Y direction
2942            for l in range(number_of_longitudes): #X direction
2943                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2944                stage[j,i] = w
2945                h = w - z[i]
2946                xmomentum[j,i] = uspeed[j,k,l]/100*h
2947                ymomentum[j,i] = vspeed[j,k,l]/100*h
2948                i += 1
2949
2951
2952    #FIXME: Refactor using code from file_function.statistics
2953    #Something like print swwstats(swwname)
2954    if verbose:
2955        x = outfile.variables['x'][:]
2956        y = outfile.variables['y'][:]
2957        times = outfile.variables['time'][:]
2958        print '------------------------------------------------'
2959        print 'Statistics of output file:'
2960        print '  Name: %s' %swwname
2961        print '  Reference:'
2962        print '    Lower left corner: [%f, %f]'\
2963              %(xllcorner, yllcorner)
2964        print '    Start time: %f' %starttime
2965        print '  Extent:'
2966        print '    x [m] in [%f, %f], len(x) == %d'\
2967              %(min(x.flat), max(x.flat), len(x.flat))
2968        print '    y [m] in [%f, %f], len(y) == %d'\
2969              %(min(y.flat), max(y.flat), len(y.flat))
2970        print '    t [s] in [%f, %f], len(t) == %d'\
2971              %(min(times), max(times), len(times))
2972        print '  Quantities [SI units]:'
2973        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2974            q = outfile.variables[name][:].flat
2975            print '    %s in [%f, %f]' %(name, min(q), max(q))
2976
2977
2978
2979    outfile.close()
2980
2981
2982
2983
2984
2985def timefile2netcdf(filename, quantity_names = None):
2986    """Template for converting typical text files with time series to
2987    NetCDF tms file.
2988
2989
    The file format is assumed to be two fields separated by a comma:
2991
2992        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
2993
2994    E.g
2995
2996      31/08/04 00:00:00, 1.328223 0 0
2997      31/08/04 00:15:00, 1.292912 0 0
2998
2999    will provide a time dependent function f(t) with three attributes
3000
    filename is assumed to be the rootname with extensions .txt (input) and .tms (output)
3002    """
3003
3004    import time, calendar
3005    from Numeric import array
3006    from config import time_format
3007    from utilities.numerical_tools import ensure_numeric
3008
3009
3010
3011    fid = open(filename + '.txt')
3012    line = fid.readline()
3013    fid.close()
3014
3015    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
    assert len(fields) == 2, msg
3018
3019    try:
3020        starttime = calendar.timegm(time.strptime(fields[0], time_format))
3021    except ValueError:
3022        msg = 'First field in file %s must be' %filename
3023        msg += ' date-time with format %s.\n' %time_format
3024        msg += 'I got %s instead.' %fields[0]
3025        raise DataTimeError, msg
3026
3027
3028    #Split values
3029    values = []
3030    for value in fields[1].split():
3031        values.append(float(value))
3032
3033    q = ensure_numeric(values)
3034
3035    msg = 'ERROR: File must contain at least one independent value'
3036    assert len(q.shape) == 1, msg
3037
3038
3039
3040    #Read times proper
3041    from Numeric import zeros, Float, alltrue
3042    from config import time_format
3043    import time, calendar
3044
3045    fid = open(filename + '.txt')
3046    lines = fid.readlines()
3047    fid.close()
3048
3049    N = len(lines)
3050    d = len(q)
3051
3052    T = zeros(N, Float)       #Time
3053    Q = zeros((N, d), Float)  #Values
3054
3055    for i, line in enumerate(lines):
3056        fields = line.split(',')
3057        realtime = calendar.timegm(time.strptime(fields[0], time_format))
3058
3059        T[i] = realtime - starttime
3060
3061        for j, value in enumerate(fields[1].split()):
3062            Q[i, j] = float(value)
3063
    msg = 'File %s must list time as a monotonically ' %filename
    msg += 'increasing sequence'
3066    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3067
3068
3069    #Create NetCDF file
3070    from Scientific.IO.NetCDF import NetCDFFile
3071
3072    fid = NetCDFFile(filename + '.tms', 'w')
3073
3074
3075    fid.institution = 'Geoscience Australia'
3076    fid.description = 'Time series'
3077
3078
3079    #Reference point
3080    #Start time in seconds since the epoch (midnight 1/1/1970)
3081    #FIXME: Use Georef
3082    fid.starttime = starttime
3083
3084
3085    # dimension definitions
3086    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3087    #fid.createDimension('number_of_vertices', 3)
3088
3089
3090    fid.createDimension('number_of_timesteps', len(T))
3091
3092    fid.createVariable('time', Float, ('number_of_timesteps',))
3093
3094    fid.variables['time'][:] = T
3095
3096    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            name = 'Attribute%d'%i
3101
3102        fid.createVariable(name, Float, ('number_of_timesteps',))
3103        fid.variables[name][:] = Q[:,i]
3104
3105    fid.close()
3106
3107
3108def extent_sww(file_name):
3109    """
3110    Read in an sww file.
3111
3112    Input;
3113    file_name - the sww file
3114
3115    Output;
3116    z - Vector of bed elevation
3117    volumes - Array.  Each row has 3 values, representing
3118    the vertices that define the volume
3119    time - Vector of the times where there is stage information
3120    stage - array with respect to time and vertices (x,y)
3121    """
3122
3123
3124    from Scientific.IO.NetCDF import NetCDFFile
3125
3126    #Check contents
3127    #Get NetCDF
3128    fid = NetCDFFile(file_name, 'r')
3129
3130    # Get the variables
3131    x = fid.variables['x'][:]
3132    y = fid.variables['y'][:]
3133    stage = fid.variables['stage'][:]
3134    #print "stage",stage
3135    #print "stage.shap",stage.shape
3136    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3137    #print "min(stage)",min(stage)
3138
3139    fid.close()
3140
3141    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3142
3143
def sww2domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
3147    """
3148    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3149
    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.
3154    """
    NaN = 9.969209968386869e+036
    #Initialise NaN (default missing value; may be overridden per quantity below)
3157
3158    from Scientific.IO.NetCDF import NetCDFFile
3159    from shallow_water import Domain
3160    from Numeric import asarray, transpose, resize
3161
3162    if verbose: print 'Reading from ', filename
3163    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3164    time = fid.variables['time']       #Timesteps
3165    if t is None:
3166        t = time[-1]
3167    time_interp = get_time_interp(time,t)
3168
3169    # Get the variables as Numeric arrays
3170    x = fid.variables['x'][:]             #x-coordinates of vertices
3171    y = fid.variables['y'][:]             #y-coordinates of vertices
3172    elevation = fid.variables['elevation']     #Elevation
3173    stage = fid.variables['stage']     #Water level
3174    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3175    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3176
3177    starttime = fid.starttime[0]
3178    volumes = fid.variables['volumes'][:] #Connectivity
3179    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3180
3181    conserved_quantities = []
3182    interpolated_quantities = {}
3183    other_quantities = []
3184
3185    # get geo_reference
3186    #sww files don't have to have a geo_ref
3187    try:
3188        geo_reference = Geo_reference(NetCDFObject=fid)
    except AttributeError:
3190        geo_reference = None
3191
3192    if verbose: print '    getting quantities'
3193    for quantity in fid.variables.keys():
3194        dimensions = fid.variables[quantity].dimensions
3195        if 'number_of_timesteps' in dimensions:
3196            conserved_quantities.append(quantity)
3197            interpolated_quantities[quantity]=\
3198                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3199        else: other_quantities.append(quantity)
3200
3201    other_quantities.remove('x')
3202    other_quantities.remove('y')
3203    other_quantities.remove('z')
3204    other_quantities.remove('volumes')
3205
3206    conserved_quantities.remove('time')
3207
3208    if verbose: print '    building domain'
3209    #    From domain.Domain:
3210    #    domain = Domain(coordinates, volumes,\
3211    #                    conserved_quantities = conserved_quantities,\
3212    #                    other_quantities = other_quantities,zone=zone,\
3213    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3214
3215    #   From shallow_water.Domain:
3216    coordinates=coordinates.tolist()
3217    volumes=volumes.tolist()
    #FIXME: should this be in mesh? (peter row)
    if fid.smoothing == 'Yes':
        unique = False
    else:
        unique = True
3221    if unique:
3222        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3223
3224
3225    try:
3226        domain = Domain(coordinates, volumes, boundary)
3227    except AssertionError, e:
3228        fid.close()
3229        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3230        raise DataDomainError, msg
3231
    if boundary is not None:
3233        domain.boundary = boundary
3234
3235    domain.geo_reference = geo_reference
3236
3237    domain.starttime=float(starttime)+float(t)
3238    domain.time=0.0
3239
3240    for quantity in other_quantities:
3241        try:
3242            NaN = fid.variables[quantity].missing_value
3243        except:
3244            pass #quantity has no missing_value number
3245        X = fid.variables[quantity][:]
3246        if very_verbose:
3247            print '       ',quantity
3248            print '        NaN =',NaN
3249            print '        max(X)'
3250            print '       ',max(X)
3251            print '        max(X)==NaN'
3252            print '       ',max(X)==NaN
3253            print ''
3254        if (max(X)==NaN) or (min(X)==NaN):
3255            if fail_if_NaN:
3256                msg = 'quantity "%s" contains no_data entry'%quantity
3257                raise DataMissingValuesError, msg
3258            else:
                data = (X != NaN)
3260                X = (X*data)+(data==0)*NaN_filler
3261        if unique:
3262            X = resize(X,(len(X)/3,3))
3263        domain.set_quantity(quantity,X)
3264    #
3265    for quantity in conserved_quantities:
3266        try:
3267            NaN = fid.variables[quantity].missing_value
3268        except:
3269            pass #quantity has no missing_value number
3270        X = interpolated_quantities[quantity]
3271        if very_verbose:
3272            print '       ',quantity
3273            print '        NaN =',NaN
3274            print '        max(X)'
3275            print '       ',max(X)
3276            print '        max(X)==NaN'
3277            print '       ',max(X)==NaN
3278            print ''
3279        if (max(X)==NaN) or (min(X)==NaN):
3280            if fail_if_NaN:
3281                msg = 'quantity "%s" contains no_data entry'%quantity
3282                raise DataMissingValuesError, msg
3283            else:
                data = (X != NaN)
3285                X = (X*data)+(data==0)*NaN_filler
3286        if unique:
3287            X = resize(X,(X.shape[0]/3,3))
3288        domain.set_quantity(quantity,X)
3289
3290    fid.close()
3291    return domain
3292
3293def interpolated_quantity(saved_quantity,time_interp):
3294
3295    #given an index and ratio, interpolate quantity with respect to time.
3296    index,ratio = time_interp
3297    Q = saved_quantity
3298    if ratio > 0:
3299        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3300    else:
3301        q = Q[index]
3302    #Return vector of interpolated values
3303    return q
3304
3305def get_time_interp(time,t=None):
3306    #Finds the ratio and index for time interpolation.
3307    #It is borrowed from previous pyvolution code.
3308    if t is None:
3309        t=time[-1]
3310        index = -1
3311        ratio = 0.
3312    else:
3313        T = time
3314        tau = t
3315        index=0
3316        msg = 'Time interval derived from file %s [%s:%s]'\
3317            %('FIXMEfilename', T[0], T[-1])
3318        msg += ' does not match model time: %s' %tau
3319        if tau < time[0]: raise DataTimeError, msg
3320        if tau > time[-1]: raise DataTimeError, msg
3321        while tau > time[index]: index += 1
3322        while tau < time[index]: index -= 1
3323        if tau == time[index]:
3324            #Protect against case where tau == time[-1] (last time)
3325            # - also works in general when tau == time[i]
3326            ratio = 0
3327        else:
3328            #t is now between index and index+1
3329            ratio = (tau - time[index])/(time[index+1] - time[index])
3330    return (index,ratio)
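
# Worked example (illustrative only): with time == [0, 10, 20] and t == 15,
# get_time_interp returns (index=1, ratio=0.5), so interpolated_quantity
# blends the two bracketing timesteps equally: q = 0.5*Q[1] + 0.5*Q[2]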
3331
3332
3333def weed(coordinates,volumes,boundary = None):
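    """Condense coordinates to unique points and remap volume (and, if
    given, boundary) indices accordingly.

    Returns the triplet (coordinates, volumes, boundary).
    """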
    #Convert Numeric arrays to plain lists
    if type(coordinates).__name__ == 'array':
        coordinates = coordinates.tolist()
    if type(volumes).__name__ == 'array':
        volumes = volumes.tolist()
3338
3339    unique = False
3340    point_dict = {}
3341    same_point = {}
3342    for i in range(len(coordinates)):
3343        point = tuple(coordinates[i])
3344        if point_dict.has_key(point):
3345            unique = True
3346            same_point[i]=point
3347            #to change all point i references to point j
3348        else:
3349            point_dict[point]=i
3350            same_point[i]=point
3351
3352    coordinates = []
3353    i = 0
3354    for point in point_dict.keys():
3355        point = tuple(point)
3356        coordinates.append(list(point))
3357        point_dict[point]=i
3358        i+=1
3359
3360
3361    for volume in volumes:
3362        for i in range(len(volume)):
3363            index = volume[i]
3364            if index>-1:
3365                volume[i]=point_dict[same_point[index]]
3366
3367    new_boundary = {}
3368    if not boundary is None:
3369        for segment in boundary.keys():
3370            point0 = point_dict[same_point[segment[0]]]
3371            point1 = point_dict[same_point[segment[1]]]
3372            label = boundary[segment]
3373            #FIXME should the bounday attributes be concaterated
3374            #('exterior, pond') or replaced ('pond')(peter row)
3375
3376            if new_boundary.has_key((point0,point1)):
3377                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3378                                              #+','+label
3379
3380            elif new_boundary.has_key((point1,point0)):
3381                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3382                                              #+','+label
3383            else: new_boundary[(point0,point1)]=label
3384
3385        boundary = new_boundary
3386
3387    return coordinates,volumes,boundary
3388
3389
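#Illustrative sketch only (made-up data): weed() collapses duplicated
#vertices so two triangles that share an edge end up referencing the
#same point indices.
def _example_weed():
    coordinates = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0],
                   [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]] #3, 4 duplicate 1, 2
    volumes = [[0, 1, 2], [3, 5, 4]]

    coordinates, volumes, boundary = weed(coordinates, volumes)
    #coordinates now holds the 4 unique points and both volumes
    #index into them consistently
    return coordinates, volumes
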
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digital Elevation Model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out is None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable ' +\
                          'storage of spatial point data'

    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev = zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil,
                                  tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, \
           'index not equal to number of points'

    infile.close()
    outfile.close()

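#Hedged usage sketch: 'mydem' is a hypothetical DEM basename and the
#source grid is assumed to have a 25m cellsize. A 4x4 stencil whose
#weights sum to one makes each output cell the mean of a 4x4 block,
#decimating the grid to 100m.
def _example_decimate_dem():
    from Numeric import ones, Float

    stencil = ones((4, 4), Float) / 16.0   #uniform averaging weights
    decimate_dem('mydem', stencil, 100, basename_out='mydem_100',
                 verbose=True)
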
def tsh2sww(filename, verbose=False): #test_tsh2sww
    """Convert a tsh/msh mesh file to an sww file, mainly as a quick
    check that the mesh 'looks' good.
    """

    from shallow_water import Domain
    from pmesh2domain import pmesh_to_domain_instance
    import time, os
    from data_manager import get_dataobject
    from os import sep, path
    from utilities.numerical_tools import mean

    if verbose: print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose: print "Number of triangles = ", len(domain)

    domain.smooth = True
    domain.format = 'sww'   #Native netcdf visualisation format
    file_path, filename = path.split(filename)
    filename, ext = path.splitext(filename)
    domain.filename = filename
    domain.reduction = mean
    if verbose: print "file_path", file_path
    if file_path == "": file_path = "."
    domain.set_datadir(file_path)

    if verbose:
        print "Output written to " + domain.get_datadir() + sep + \
              domain.filename + "." + domain.format
    sww = get_dataobject(domain)
    sww.store_connectivity()
    sww.store_timestep('stage')

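#Hedged usage sketch: 'channel.tsh' is a hypothetical mesh file. The
#resulting channel.sww is written next to the input and can be viewed
#with Swollen as a quick sanity check of the mesh.
def _example_tsh2sww():
    tsh2sww('channel.tsh', verbose=True)
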
def asc_csiro2sww(bath_dir,
                  elevation_dir,
                  ucur_dir,
                  vcur_dir,
                  sww_file,
                  minlat = None, maxlat = None,
                  minlon = None, maxlon = None,
                  zscale=1,
                  mean_stage = 0,
                  fail_on_NaN = True,
                  elevation_NaN_filler = 0,
                  bath_prefix='ba',
                  elevation_prefix='el',
                  verbose=False):
    """
    Produce an sww boundary file from esri ascii data supplied by CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    Four types of information are expected:
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions:
    All files are in esri ascii format
    The metadata of all the files is the same
    Each type is in a separate directory
    There is one bath file, with extension .000
    The time period is less than 24hrs and uniform.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from coordinate_transforms.redfearn import redfearn

    precision = Float # So if we want to change the precision its done here

    # go in to the bath dir and load the only file
    bath_files = os.listdir(bath_dir)
    #print "bath_files",bath_files

    #fixme: make more general?
    bath_file = bath_files[0]
    bath_dir_file = bath_dir + os.sep + bath_file
    bath_metadata, bath_grid = _read_asc(bath_dir_file)
    #print "bath_metadata",bath_metadata
    #print "bath_grid",bath_grid

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    base_start = bath_file[-12:]

    #go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir + os.sep + elevation_prefix \
                         + base_start
    #elevation_metadata,elevation_grid = _read_asc(elevation_dir_file)
    #print "elevation_dir_file",elevation_dir_file
    #print "elevation_grid", elevation_grid

    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)

    # the first elevation file should be the
    # file with the same base name as the bath data
    #print "elevation_files[0]",elevation_files[0]
    #print "elevation_prefix + base_start",elevation_prefix + base_start
    assert elevation_files[0] == elevation_prefix + base_start

    #assert bath_metadata == elevation_metadata

    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    #number_of_times = len(os.listdir(elevation_dir))
    #number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
                  for x in range(number_of_longitudes)]
    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
                 for y in range(number_of_latitudes)]

    # reverse order of lat, so the first lat represents the first grid row
    latitudes.reverse()

    #print "latitudes - before _get_min_max_indexes",latitudes
    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes, longitudes,
                                                  minlat=minlat, maxlat=maxlat,
                                                  minlon=minlon, maxlon=maxlon)

    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
    #print "bath_grid",bath_grid
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
    #print "number_of_points",number_of_points

    #Work out the times
    if len(elevation_files) > 1:
        # Assume: The time period is less than 24hrs.
        time_period = (int(elevation_files[1][-3:]) - \
                       int(elevation_files[0][-3:]))*60*60
        times = [x*time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]
    #print "times", times
    #print "number_of_latitudes", number_of_latitudes
    #print "number_of_longitudes", number_of_longitudes
    #print "number_of_times", number_of_times
    #print "latitudes", latitudes
    #print "longitudes", longitudes

    if verbose:
        print '------------------------------------------------'
        print 'Statistics:'
        print '  Extent (lat/lon):'
        print '    lat in [%f, %f], len(lat) == %d'\
              %(min(latitudes), max(latitudes),
                len(latitudes))
        print '    lon in [%f, %f], len(lon) == %d'\
              %(min(longitudes), max(longitudes),
                len(longitudes))
        print '    t in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))

    ######### WRITE THE SWW FILE #############
    # NetCDF file definition
    outfile = NetCDFFile(sww_file, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'

    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    #Start time in seconds since the epoch (midnight 1/1/1970)
    outfile.starttime = starttime = times[0]

    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)
    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    #Store
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print 'Making triangular grid'

    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat, lon)

            msg = 'Zone boundary crossed at longitude = %s' %lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1

    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            #Note, this is different to the ferret2sww code
            #since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) #Upper element
            volumes.append([v4,v2,v3]) #Lower element

    volumes = array(volumes)

    geo_ref = Geo_reference(refzone, min(x), min(y))
    geo_ref.write_NetCDF(outfile)

    # This would put the geo ref in the middle
    #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2.0, (max(y)+min(y))/2.)

    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (x/y):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y),
                len(y))
        print 'geo_ref: ', geo_ref

    z = resize(bath_grid, outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

    # do this to create an ok sww file.
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z
    # put the next line up in the code after outfile.order = 1
    #number_of_times = 1

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
            _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])

        #print "elevation_grid",elevation_grid
        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]

        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue(missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                raise DataMissingValuesError, msg
            else:
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler

        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]                      #depth
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1

    outfile.close()

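#Hedged usage sketch: all directory and file names here are
#hypothetical. Convert CSIRO esri ascii grids to a boundary sww,
#clipped to a lat/lon window.
def _example_asc_csiro2sww():
    asc_csiro2sww('bath_dir', 'elevation_dir', 'ucur_dir', 'vcur_dir',
                  'boundary.sww',
                  minlat=-38.0, maxlat=-35.0,
                  minlon=148.0, maxlon=151.0,
                  mean_stage=0.0, verbose=True)
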
def _get_min_max_indexes(latitudes, longitudes,
                         minlat=None, maxlat=None,
                         minlon=None, maxlon=None):
    """
    Return min and max indexes (for slicing) of the lat and long arrays,
    covering the area specified with min/max lat/long.

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assume latitudes & longitudes are sorted:
    long - from low to high (west to east, eg 148 - 151)
    lat - from high to low (north to south, eg -35 - -38)
    """

    # reverse order of lat, so it's in ascending order
    latitudes.reverse()
    largest_lat_index = len(latitudes)-1

    #Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0

    if maxlat is None:
        lat_max_index = largest_lat_index #len(latitudes)
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)

    #Take into account that the latitude list was reversed
    latitudes.reverse() # Python passes by reference, need to swap back
    lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
                                   largest_lat_index - lat_min_index
    lat_max_index = lat_max_index + 1 # taking into account how slicing works
    lon_max_index = lon_max_index + 1 # taking into account how slicing works

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index

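#Illustrative sketch only (made-up 4x4 grid): slice out the smallest
#index window covering a requested lat/lon extent. Latitudes run north
#to south, as _get_min_max_indexes expects.
def _example_get_min_max_indexes():
    latitudes  = [-34.0, -35.0, -36.0, -37.0]   #high to low
    longitudes = [148.0, 149.0, 150.0, 151.0]   #low to high

    kmin, kmax, lmin, lmax = _get_min_max_indexes(
        latitudes, longitudes,
        minlat=-35.5, maxlat=-34.5,
        minlon=149.5, maxlon=150.5)

    #latitudes[kmin:kmax]  -> [-34.0, -35.0, -36.0]
    #longitudes[lmin:lmax] -> [149.0, 150.0, 151.0]
    return latitudes[kmin:kmax], longitudes[lmin:lmax]
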
def _read_asc(filename, verbose=False):
    """Read esri file in the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    """

    datafile = open(filename)

    if verbose: print 'Reading DEM from %s' %(filename)
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), ' lines'

    ncols = int(lines.pop(0).split()[1].strip())
    nrows = int(lines.pop(0).split()[1].strip())
    xllcorner = float(lines.pop(0).split()[1].strip())
    yllcorner = float(lines.pop(0).split()[1].strip())
    cellsize = float(lines.pop(0).split()[1].strip())
    NODATA_value = float(lines.pop(0).split()[1].strip())

    assert len(lines) == nrows

    #Store data
    grid = []
    for i, line in enumerate(lines):
        cells = line.split()
        assert len(cells) == ncols
        grid.append(array([float(x) for x in cells]))
    grid = array(grid)

    return {'xllcorner':xllcorner,
            'yllcorner':yllcorner,
            'cellsize':cellsize,
            'NODATA_value':NODATA_value}, grid

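#Self-contained sketch ('tiny.asc' is a hypothetical file name): write
#a 2x2 esri ascii grid and read it back with _read_asc.
def _example_read_asc():
    fd = open('tiny.asc', 'w')
    fd.write('ncols 2\n'
             'nrows 2\n'
             'xllcorner 0.0\n'
             'yllcorner 0.0\n'
             'cellsize 10.0\n'
             'NODATA_value -9999\n'
             '1.0 2.0\n'
             '3.0 4.0\n')
    fd.close()

    metadata, grid = _read_asc('tiny.asc', verbose=False)
    #metadata['cellsize'] -> 10.0; grid.shape -> (2, 2)
    return metadata, grid
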
# FIXME (Ole): Is this doing anything at all?
def sww2timeseries(swwfile,
                   gauge_filename,
                   gauge_data_outname,
                   quantity = None,
                   time_min = None,
                   time_max = None,
                   verbose = False):

    """Read SWW file and extract time series for prescribed quantities at
    gauge locations.

    The gauge locations are defined in gauge_filename. This file should be
    in the form: gaugename, easting, northing, and should be stored in a
    .csv or .xya file.

    Time series data at the gauges can be written to file (for example,
    Benfield requested raw data) with the default being no data written.

    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'depth'.

    The user can define a list of quantities. The possibilities are
    the conserved quantities of the shallow water wave equation and other
    quantities which can be derived from those, i.e.
    ['depth', 'xmomentum', 'ymomentum', 'momentum', 'velocity', 'bearing'].

    Momentum is the absolute momentum, sqrt(xmomentum^2 + ymomentum^2).
    Note, units of momentum are m^2/s and depth is m.

    Velocity is absolute momentum divided by depth. (Confirming correct units:
    vel = abs mom / depth = (m^2/s)/m = m/s.)

    Bearing returns the angle of the velocity vector from North.

    If time_min and time_max are given, output is plotted in that time range.
    The default is to plot the entire time range found in the sww file.

    The exported graphics are in png format and will be stored in the same
    directory as the input file.
    """

    if quantity is None: quantity = 'depth'

    # extract gauge locations from gauge file

    # extract all quantities from swwfile (f = file_function)
    #FIXME: f is never created, so the body below is only a skeleton
    #if time_min is None: time_min = min(f.get_time())
    #if time_max is None: time_max = max(f.get_time())

    # loop through appropriate range of time
    # plot prescribed quantities and export data if requested

    #if gauge_data_outname is None:

    return

#-------------------------------------------------------------
if __name__ == "__main__":
    pass
