source: inundation/pyvolution/data_manager.py @ 3315
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using pyvolution
48
49"""
50
51import exceptions
52class TitleValueError(exceptions.Exception): pass
53class DataMissingValuesError(exceptions.Exception): pass
54class DataFileNotOpenError(exceptions.Exception): pass
55class DataTimeError(exceptions.Exception): pass
56class DataDomainError(exceptions.Exception): pass
57
58
59
60import csv
61import os
62
63from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
64     searchsorted, zeros, allclose, around
65
66
67from coordinate_transforms.geo_reference import Geo_reference
68from geospatial_data.geospatial_data import Geospatial_data
69
70
71def make_filename(s):
72    """Transform argument string into a suitable filename
73    """
74
75    s = s.strip()
76    s = s.replace(' ', '_')
77    s = s.replace('(', '')
78    s = s.replace(')', '')
79    s = s.replace('__', '_')
80
81    return s
82
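
# A minimal usage sketch (not part of the module API): make_filename just
# normalises a label so it is safe to use as part of a filename.
def _example_make_filename():
    # ' bed slope (v2) '  ->  'bed_slope_v2'
    assert make_filename(' bed slope (v2) ') == 'bed_slope_v2'
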

def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):  #Need both read and write access
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' % path

    return path

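
# A minimal usage sketch: check_dir returns the verified path with a trailing
# separator, so it can be prepended directly to a filename.  The directory
# name below is hypothetical.
def _example_check_dir():
    datadir = check_dir('~/anuga_output')  # created if it does not exist
    return datadir + 'myrun.sww'           # datadir already ends with os.sep
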


def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" % X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' % size

    if time is not None:
        FN += '_time%.2f' % time

    FN += '.' + format
    return FN

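
# A minimal usage sketch showing how the pieces combine (unix separators
# assumed): the call below returns 'data/bedslope_size4_time1.50.sww'.
def _example_create_filename():
    return create_filename('data', 'bedslope', 'sww', size=4, time=1.5)
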

def get_files(datadir, filename, format, size):
    """Get all files (names) with given name, size and format
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)

    #Match names as produced by create_filename
    #(check_dir has already appended the separator)
    pattern = dir + filename + '_size%d*.%s' % (size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], \
               "Mode %s must be either 'w' (write), 'r' (read) or 'a' (append)" % mode

        #Create filename
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))


        self.filename = create_filename(domain.get_datadir(),
            domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain


        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
        #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        minimum_allowed_depth = 0.001
        #minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase =  file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #change domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()


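# A minimal sketch of the intended calling sequence, assuming 'domain' is an
# initialised pyvolution Domain; the loop below is schematic and
# 'evolve_one_step' is a hypothetical stand-in for the real evolve machinery.
def _example_sww_output(domain):
    sww = Data_format_sww(domain, mode='w')
    sww.store_connectivity()    #Static mesh and bed elevation written once
    while domain.time < 10.0:
        domain.evolve_one_step()    #Hypothetical
        sww.store_timestep(['stage', 'xmomentum', 'ymomentum'])
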

#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity of the triangles
        constituting the mesh.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #Note: no elevation variable is defined in the .cpt file

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        #Store quantity in the variable of the same name
        fid.variables[name][i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()





#Function for storing xya output
#FIXME Not done yet for this version
#This is obsolete.  Use geo_spatial_data instead
class Data_format_xya(Data_format):
    """Interface to the ASCII xya format (obsolete)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes are assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file
        """
        pass


#### NBED national exposure database

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
class Exposure_csv:
    def __init__(self,file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, refine_polygon=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.

        It is not for adding and deleting rows.

        Can geospatial handle string attributes?  It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef?  Bad, since
        different attributes are in different structures.  Not so bad, as
        the info to write when the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.
        """
        self._file_name = file_name
        self._geospatial = None #

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
        self._load_exposure_csv(self._file_name)
        try:
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                 longitudes = longs)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result != 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result != 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic, \
                    other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result != 0:
                    return result
            return 0
        else:
            return 1

    def _load_exposure_csv(self, file_name):
        """
        Load in the csv as a dic, title as key and column info as value.
        Also, create a dic, title as key and column index as value.
        """
        #
        attribute_dic = {}
        title_index_dic = {}
        titles_stripped = [] # list of titles

        reader = csv.reader(file(file_name))

        # Read in and manipulate the title info
        titles = reader.next()
        for i,title in enumerate(titles):
            titles_stripped.append(title.strip())
            title_index_dic[title.strip()] = i
        title_count = len(titles_stripped)
        #print "title_index_dic",title_index_dic


        #create a dic of column values, indexed by column title
        for line in reader:
            if len(line) != title_count:
                raise IOError #FIXME make this nicer
            for i, value in enumerate(line):
                attribute_dic.setdefault(titles_stripped[i],[]).append(value)

        return attribute_dic, title_index_dic

    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Thinking of semi-hard coding this to not be so..

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' % column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]

    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) != value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' % column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name,'wb')
        writer = csv.writer(fd)

        #Write the title to a csv file
        line = [None]* len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]]= title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]* len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]]= \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

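
# A minimal usage sketch (file and column names hypothetical): read a column,
# derive a new one and write the result back out.  All values are strings.
def _example_exposure_csv():
    exposure = Exposure_csv('exposure.csv')
    suburbs = exposure.get_column('SUBURB')
    flags = ['1'] * len(suburbs)
    exposure.set_column('INUNDATED', flags, overwrite=True)
    exposure.save('exposure_out.csv')
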

#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()

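
# A minimal usage sketch: two disconnected triangles written as an obj file.
def _example_write_obj():
    from Numeric import array, Float
    x = array([[0,1,0], [1,1,0]], Float)   #One row of x coordinates per triangle
    y = array([[0,0,1], [0,1,1]], Float)
    z = array([[0,0,0], [0,0,0]], Float)
    write_obj('two_triangles', x, y, z)    #Writes two_triangles.obj
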

#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)

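
# A minimal usage sketch (base filename hypothetical): size must equal the
# number of volumes stored in the file.
def _example_sww2obj():
    sww2obj('bedslope', 400)   #Reads ./bedslope_size400.sww
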

def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    ##i = 0
    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]

        ##i += 1


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info
    #and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(filename, 'r').readlines()  #glob returns full paths
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Could not extract time from filename %s' % filename



        ##i = 0
        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]

            ##i += 1

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

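
# A minimal usage sketch (file names hypothetical): keep every 5th timestep
# to produce a smaller file for visualisation.
def _example_filter_netcdf():
    filter_netcdf('run.sww', 'run_coarse.sww', first=0, step=5)
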

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

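
# A minimal sketch of the dispatch above: domain.format names the suffix of a
# Data_format_* class in this module, e.g. 'sww' -> Data_format_sww.
def _example_get_dataobject(domain):
    #Assuming domain.format == 'sww', this returns a Data_format_sww writer
    return get_dataobject(domain, mode='w')
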
#FIXME move into geospatial.  There should only be one method that
# reads xya, and writes pts.
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get NetCDF
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)


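# A minimal usage sketch (file names hypothetical): negate depths so they
# become elevations via z_func while converting.
def _example_xya2pts():
    xya2pts('depths', basename_out='elevation_points',
            attribute_name='elevation', z_func=lambda z: -z)
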

######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False,):
    """Read Digital Elevation Model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

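
# A minimal usage sketch (base filename and coordinates hypothetical): clip
# the DEM to a bounding box while converting.  With use_cache=True a repeated
# call with identical arguments is served by the caching module instead of
# being recomputed.
def _example_dem2pts():
    dem2pts('perth', easting_min=380000, easting_max=400000,
            northing_min=6430000, northing_max=6450000,
            use_cache=True, verbose=True)
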

def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation Model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose and nn > 0:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])


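# A minimal sketch of the generator above: it yields one list of
# [easting, northing, elevation] triplets per cross section.
def _example_read_hecras_cross_sections():
    lines = ['SURFACE LINE:',
             ' 405548.67,   6438142.76,   35.37',
             ' 405552.24,   6438146.28,   35.41',
             'END CROSS-SECTION']
    for section in _read_hecras_cross_sections(lines):
        print 'Read %d points' % len(section)   # -> Read 2 points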
1548
1549
1550def hecras_cross_sections2pts(basename_in,
1551                              basename_out=None,
1552                              verbose=False):
1553    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1554
1555    Example:
1556
1557
1558# RAS export file created on Mon 15Aug2005 11:42
1559# by HEC-RAS Version 3.1.1
1560
1561BEGIN HEADER:
1562  UNITS: METRIC
1563  DTM TYPE: TIN
1564  DTM: v:\1\cit\perth_topo\river_tin
1565  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1566  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1567  MAP PROJECTION: UTM
1568  PROJECTION ZONE: 50
1569  DATUM: AGD66
1570  VERTICAL DATUM:
1571  NUMBER OF REACHES:  19
1572  NUMBER OF CROSS-SECTIONS:  14206
1573END HEADER:
1574
1575
1576Only the SURFACE LINE data of the following form will be utilised
1577
1578  CROSS-SECTION:
1579    STREAM ID:Southern-Wungong
1580    REACH ID:Southern-Wungong
1581    STATION:19040.*
1582    CUT LINE:
1583      405548.671603161 , 6438142.7594925
1584      405734.536092045 , 6438326.10404912
1585      405745.130459356 , 6438331.48627354
1586      405813.89633823 , 6438368.6272789
1587    SURFACE LINE:
1588     405548.67,   6438142.76,   35.37
1589     405552.24,   6438146.28,   35.41
1590     405554.78,   6438148.78,   35.44
1591     405555.80,   6438149.79,   35.44
1592     405559.37,   6438153.31,   35.45
1593     405560.88,   6438154.81,   35.44
1594     405562.93,   6438156.83,   35.42
1595     405566.50,   6438160.35,   35.38
1596     405566.99,   6438160.83,   35.37
1597     ...
1598   END CROSS-SECTION
1599
1600    Convert to NetCDF pts format which is
1601
1602    points:  (Nx2) Float array
1603    elevation: N Float array
1604    """
1605
1606    #FIXME: Can this be written feasibly using write_pts?
1607
1608    import os
1609    from Scientific.IO.NetCDF import NetCDFFile
1610    from Numeric import Float, zeros, reshape
1611
1612    root = basename_in
1613
1614    #Get ASCII file
1615    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1616
1617    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1618
1619    lines = infile.readlines()
1620    infile.close()
1621
1622    if verbose: print 'Converting to pts format'
1623
1624    i = 0
1625    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1626        i += 1
1627
1628    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1629    i += 1
1630
1631    assert lines[i].strip().upper().startswith('UNITS:')
1632    units = lines[i].strip().split()[1]
1633    i += 1
1634
1635    assert lines[i].strip().upper().startswith('DTM TYPE:')
1636    i += 1
1637
1638    assert lines[i].strip().upper().startswith('DTM:')
1639    i += 1
1640
1641    assert lines[i].strip().upper().startswith('STREAM')
1642    i += 1
1643
1644    assert lines[i].strip().upper().startswith('CROSS')
1645    i += 1
1646
1647    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1648    projection = lines[i].strip().split(':')[1]
1649    i += 1
1650
1651    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1652    zone = int(lines[i].strip().split(':')[1])
1653    i += 1
1654
1655    assert lines[i].strip().upper().startswith('DATUM:')
1656    datum = lines[i].strip().split(':')[1]
1657    i += 1
1658
1659    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1660    i += 1
1661
1662    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1663    i += 1
1664
1665    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1666    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1667    i += 1
1668
1669
1670    #Now read all points
1671    points = []
1672    elevation = []
1673    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1674        for k, entry in enumerate(entries):
1675            points.append(entry[:2])
1676            elevation.append(entry[2])
1677
1678
1679    msg = 'Actual #number_of_cross_sections == %d, Reported as %d'\
1680          %(j+1, number_of_cross_sections)
1681    assert j+1 == number_of_cross_sections, msg
1682
1683    #Get output file
1684    if basename_out == None:
1685        ptsname = root + '.pts'
1686    else:
1687        ptsname = basename_out + '.pts'
1688
1689    #FIXME (DSG-ON): use loadASCII export_points_file
1690    if verbose: print 'Store to NetCDF file %s' %ptsname
1691    # NetCDF file definition
1692    outfile = NetCDFFile(ptsname, 'w')
1693
1694    #Create new file
1695    outfile.institution = 'Geoscience Australia'
1696    outfile.description = 'NetCDF pts format for compact and portable ' +\
1697                          'storage of spatial point data derived from HEC-RAS'
1698
1699    #Georeferencing
1700    outfile.zone = zone
1701    outfile.xllcorner = 0.0
1702    outfile.yllcorner = 0.0
1703    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1704    outfile.false_northing = 1000000
1705
1706    outfile.projection = projection
1707    outfile.datum = datum
1708    outfile.units = units
1709
1710
1711    outfile.createDimension('number_of_points', len(points))
1712    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1713
1714    # variable definitions
1715    outfile.createVariable('points', Float, ('number_of_points',
1716                                             'number_of_dimensions'))
1717    outfile.createVariable('elevation', Float, ('number_of_points',))
1718
1719    # Get handles to the variables
1720    outfile.variables['points'][:] = points
1721    outfile.variables['elevation'][:] = elevation
1722
1723    outfile.close()
1724
1725
1726
1727def sww2dem(basename_in, basename_out = None,
1728            quantity = None,
1729            timestep = None,
1730            reduction = None,
1731            cellsize = 10,
1732            NODATA_value = -9999,
1733            easting_min = None,
1734            easting_max = None,
1735            northing_min = None,
1736            northing_max = None,
1737            verbose = False,
1738            origin = None,
1739            datum = 'WGS84',
            format = 'ers'):
1741
    """Read SWW file and convert to Digital Elevation Model format (.asc or .ers)
1743
1744    Example (ASC):
1745
1746    ncols         3121
1747    nrows         1800
1748    xllcorner     722000
1749    yllcorner     5893000
1750    cellsize      25
1751    NODATA_value  -9999
1752    138.3698 137.4194 136.5062 135.5558 ..........
1753
    Also write an accompanying file with same basename_out but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as follows:
1758
1759    Projection    UTM
1760    Zone          56
1761    Datum         WGS84
1762    Zunits        NO
1763    Units         METERS
1764    Spheroid      WGS84
1765    Xshift        0.0000000000
1766    Yshift        10000000.0000000000
1767    Parameters
1768
1769
1770    The parameter quantity must be the name of an existing quantity or
1771    an expression involving existing quantities. The default is
1772    'elevation'.
1773
1774    if timestep (an index) is given, output quantity at that timestep
1775
1776    if reduction is given use that to reduce quantity over all timesteps.
1777
    datum specifies the geodetic datum written to the output file (default 'WGS84')
1779
1780    format can be either 'asc' or 'ers'
1781    """
1782
1783    import sys
1784    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1785    from Numeric import array2string
1786
1787    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1788    from util import apply_expression_to_dictionary
1789
1790    msg = 'Format must be either asc or ers'
1791    assert format.lower() in ['asc', 'ers'], msg
1792
1793
1794    false_easting = 500000
1795    false_northing = 10000000
1796
1797    if quantity is None:
1798        quantity = 'elevation'
1799
1800    if reduction is None:
1801        reduction = max
1802
1803    if basename_out is None:
1804        basename_out = basename_in + '_%s' %quantity
1805
1806    swwfile = basename_in + '.sww'
1807    demfile = basename_out + '.' + format
1808    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option)
1810
1811    #Read sww file
1812    if verbose: print 'Reading from %s' %swwfile
1813    from Scientific.IO.NetCDF import NetCDFFile
1814    fid = NetCDFFile(swwfile)
1815
1816    #Get extent and reference
1817    x = fid.variables['x'][:]
1818    y = fid.variables['y'][:]
1819    volumes = fid.variables['volumes'][:]
1820
1821    number_of_timesteps = fid.dimensions['number_of_timesteps']
1822    number_of_points = fid.dimensions['number_of_points']
1823    if origin is None:
1824
1825        #Get geo_reference
1826        #sww files don't have to have a geo_ref
1827        try:
1828            geo_reference = Geo_reference(NetCDFObject=fid)
1829        except AttributeError, e:
1830            geo_reference = Geo_reference() #Default georef object
1831
1832        xllcorner = geo_reference.get_xllcorner()
1833        yllcorner = geo_reference.get_yllcorner()
1834        zone = geo_reference.get_zone()
1835    else:
1836        zone = origin[0]
1837        xllcorner = origin[1]
1838        yllcorner = origin[2]
1839
1840
1841
1842    #FIXME: Refactor using code from file_function.statistics
1843    #Something like print swwstats(swwname)
1844    if verbose:
1845        x = fid.variables['x'][:]
1846        y = fid.variables['y'][:]
1847        times = fid.variables['time'][:]
1848        print '------------------------------------------------'
1849        print 'Statistics of SWW file:'
1850        print '  Name: %s' %swwfile
1851        print '  Reference:'
1852        print '    Lower left corner: [%f, %f]'\
1853              %(xllcorner, yllcorner)
1854        print '    Start time: %f' %fid.starttime[0]
1855        print '  Extent:'
1856        print '    x [m] in [%f, %f], len(x) == %d'\
1857              %(min(x.flat), max(x.flat), len(x.flat))
1858        print '    y [m] in [%f, %f], len(y) == %d'\
1859              %(min(y.flat), max(y.flat), len(y.flat))
1860        print '    t [s] in [%f, %f], len(t) == %d'\
1861              %(min(times), max(times), len(times))
1862        print '  Quantities [SI units]:'
1863        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
1864            q = fid.variables[name][:].flat
1865            print '    %s in [%f, %f]' %(name, min(q), max(q))
1866
1867
1868
1869
1870
1871    # Get quantity and reduce if applicable
1872    if verbose: print 'Processing quantity %s' %quantity
1873
1874    # Turn NetCDF objects into Numeric arrays
1875    quantity_dict = {}
1876    for name in fid.variables.keys():
1877        quantity_dict[name] = fid.variables[name][:]
1878
1879
1880
1881    # Convert quantity expression to quantities found in sww file   
1882    q = apply_expression_to_dictionary(quantity, quantity_dict)
1883
1884
1885
    if len(q.shape) == 2:
        #q has a time component
        if timestep is not None:
            #Extract values at the given timestep (as per docstring)
            q = q[timestep, :]
        else:
            #Reduce along the temporal dimension
            if verbose: print 'Reducing quantity %s' %quantity
            q_reduced = zeros( number_of_points, Float )

            for k in range(number_of_points):
                q_reduced[k] = reduction( q[:,k] )

            q = q_reduced
1896
1897    #Post condition: Now q has dimension: number_of_points
1898    assert len(q.shape) == 1
1899    assert q.shape[0] == number_of_points
1900
1901
1902    if verbose:
1903        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1904
1905
1906    #Create grid and update xll/yll corner and x,y
1907
1908    #Relative extent
1909    if easting_min is None:
1910        xmin = min(x)
1911    else:
1912        xmin = easting_min - xllcorner
1913
1914    if easting_max is None:
1915        xmax = max(x)
1916    else:
1917        xmax = easting_max - xllcorner
1918
1919    if northing_min is None:
1920        ymin = min(y)
1921    else:
1922        ymin = northing_min - yllcorner
1923
1924    if northing_max is None:
1925        ymax = max(y)
1926    else:
1927        ymax = northing_max - yllcorner
1928
1929
1930
1931    if verbose: print 'Creating grid'
1932    ncols = int((xmax-xmin)/cellsize)+1
1933    nrows = int((ymax-ymin)/cellsize)+1
1934
1935
1936    #New absolute reference and coordinates
1937    newxllcorner = xmin+xllcorner
1938    newyllcorner = ymin+yllcorner
1939
1940    x = x+xllcorner-newxllcorner
1941    y = y+yllcorner-newyllcorner
1942
1943    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1944    assert len(vertex_points.shape) == 2
1945
1946
1947
1948    grid_points = zeros ( (ncols*nrows, 2), Float )
1949
1950
1951    for i in xrange(nrows):
1952        if format.lower() == 'asc':
1953            yg = i*cellsize
1954        else:
            #This will flip the order of the y values for ers
1956            yg = (nrows-i)*cellsize
1957
1958        for j in xrange(ncols):
1959            xg = j*cellsize
1960            k = i*ncols + j
1961
1962            grid_points[k,0] = xg
1963            grid_points[k,1] = yg
1964
1965    #Interpolate
1966    #from least_squares import Interpolation
1967    from fit_interpolate.interpolate import Interpolate
1968
1969
1970    interp = Interpolate(vertex_points, volumes, verbose = verbose)
1971
1972
1973
1974    #Interpolate using quantity values
1975    if verbose: print 'Interpolating'
1976    grid_values = interp.interpolate(q, grid_points).flat
1977
1978
1979    if verbose:
1980        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
1981                                                      max(grid_values))
1982
1983    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
1984    P = interp.mesh.get_boundary_polygon()
1985    outside_indices = outside_polygon(grid_points, P, closed=True)
1986
1987    for i in outside_indices:
1988        grid_values[i] = NODATA_value
1989
1990
1991
1992
1993    if format.lower() == 'ers':
1994        # setup ERS header information
1995        grid_values = reshape(grid_values,(nrows, ncols))
1996        header = {}
1997        header['datum'] = '"' + datum + '"'
1998        # FIXME The use of hardwired UTM and zone number needs to be made optional
1999        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2000        header['projection'] = '"UTM-' + str(zone) + '"'
2001        header['coordinatetype'] = 'EN'
2002        if header['coordinatetype'] == 'LL':
2003            header['longitude'] = str(newxllcorner)
2004            header['latitude'] = str(newyllcorner)
2005        elif header['coordinatetype'] == 'EN':
2006            header['eastings'] = str(newxllcorner)
2007            header['northings'] = str(newyllcorner)
2008        header['nullcellvalue'] = str(NODATA_value)
2009        header['xdimension'] = str(cellsize)
2010        header['ydimension'] = str(cellsize)
2011        header['value'] = '"' + quantity + '"'
2012        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2013
2014
2015        #Write
2016        if verbose: print 'Writing %s' %demfile
2017        import ermapper_grids
2018        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2019
2020        fid.close()
2021    else:
2022        #Write to Ascii format
2023
2024        #Write prj file
2025        prjfile = basename_out + '.prj'
2026
2027        if verbose: print 'Writing %s' %prjfile
2028        prjid = open(prjfile, 'w')
2029        prjid.write('Projection    %s\n' %'UTM')
2030        prjid.write('Zone          %d\n' %zone)
2031        prjid.write('Datum         %s\n' %datum)
2032        prjid.write('Zunits        NO\n')
2033        prjid.write('Units         METERS\n')
2034        prjid.write('Spheroid      %s\n' %datum)
2035        prjid.write('Xshift        %d\n' %false_easting)
2036        prjid.write('Yshift        %d\n' %false_northing)
2037        prjid.write('Parameters\n')
2038        prjid.close()
2039
2040
2041
2042        if verbose: print 'Writing %s' %demfile
2043
2044        ascid = open(demfile, 'w')
2045
2046        ascid.write('ncols         %d\n' %ncols)
2047        ascid.write('nrows         %d\n' %nrows)
2048        ascid.write('xllcorner     %d\n' %newxllcorner)
2049        ascid.write('yllcorner     %d\n' %newyllcorner)
2050        ascid.write('cellsize      %f\n' %cellsize)
2051        ascid.write('NODATA_value  %d\n' %NODATA_value)
2052
2053
2054        #Get bounding polygon from mesh
2055        #P = interp.mesh.get_boundary_polygon()
2056        #inside_indices = inside_polygon(grid_points, P)
2057
2058        for i in range(nrows):
2059            if verbose and i%((nrows+10)/10)==0:
2060                print 'Doing row %d of %d' %(i, nrows)
2061
2062            base_index = (nrows-i-1)*ncols
2063
            row_values = grid_values[base_index:base_index+ncols]
            s = array2string(row_values, max_line_width=sys.maxint)
2066            ascid.write(s[1:-1] + '\n')
2067
2068
2075
2076        #Close
2077        ascid.close()
2078        fid.close()
2079
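
# Example usage of sww2dem (a minimal sketch; 'runup' is a hypothetical
# basename and runup.sww is assumed to exist):
#
#     sww2dem('runup', quantity='stage', cellsize=25,
#             reduction=max, format='asc', verbose=True)
#
# This computes the maximum stage over all timesteps at each vertex,
# rasterises it onto a 25 m grid and writes runup_stage.asc together
# with an accompanying runup_stage.prj file. As noted in the docstring,
# quantity may also be an expression such as 'stage - elevation'.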
2080#Backwards compatibility
2081def sww2asc(basename_in, basename_out = None,
2082            quantity = None,
2083            timestep = None,
2084            reduction = None,
2085            cellsize = 10,
2086            verbose = False,
2087            origin = None):
    print 'sww2asc will soon be obsolete - please use sww2dem'
2089    sww2dem(basename_in,
2090            basename_out = basename_out,
2091            quantity = quantity,
2092            timestep = timestep,
2093            reduction = reduction,
2094            cellsize = cellsize,
2095            verbose = verbose,
2096            origin = origin,
            datum = 'WGS84',
            format = 'asc')
2099
2100def sww2ers(basename_in, basename_out = None,
2101            quantity = None,
2102            timestep = None,
2103            reduction = None,
2104            cellsize = 10,
2105            verbose = False,
2106            origin = None,
2107            datum = 'WGS84'):
    print 'sww2ers will soon be obsolete - please use sww2dem'
2109    sww2dem(basename_in,
2110            basename_out = basename_out,
2111            quantity = quantity,
2112            timestep = timestep,
2113            reduction = reduction,
2114            cellsize = cellsize,
2115            verbose = verbose,
2116            origin = origin,
2117            datum = datum,
2118            format = 'ers')
2119################################# END COMPATIBILITY ##############
2120
2121
2122
2123def sww2pts(basename_in, basename_out=None,
2124            data_points=None,
2125            quantity=None,
2126            timestep=None,
2127            reduction=None,
2128            NODATA_value=-9999,
2129            verbose=False,
2130            origin=None):
2131            #datum = 'WGS84')
2132
2133
2134    """Read SWW file and convert to interpolated values at selected points
2135
    The parameter 'quantity' must be the name of an existing quantity or
2137    an expression involving existing quantities. The default is
2138    'elevation'.
2139
2140    if timestep (an index) is given, output quantity at that timestep
2141
2142    if reduction is given use that to reduce quantity over all timesteps.
2143
    data_points (Nx2 array) gives locations of points where quantity is to be computed.
2145   
2146    """
2147
2148    import sys
2149    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2150    from Numeric import array2string
2151
2152    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2153    from util import apply_expression_to_dictionary
2154
2155    from geospatial_data import Geospatial_data
2156
2157    if quantity is None:
2158        quantity = 'elevation'
2159
2160    if reduction is None:
2161        reduction = max
2162
2163    if basename_out is None:
2164        basename_out = basename_in + '_%s' %quantity
2165
2166    swwfile = basename_in + '.sww'
2167    ptsfile = basename_out + '.pts'
2168
2169    # Read sww file
2170    if verbose: print 'Reading from %s' %swwfile
2171    from Scientific.IO.NetCDF import NetCDFFile
2172    fid = NetCDFFile(swwfile)
2173
2174    # Get extent and reference
2175    x = fid.variables['x'][:]
2176    y = fid.variables['y'][:]
2177    volumes = fid.variables['volumes'][:]
2178
2179    number_of_timesteps = fid.dimensions['number_of_timesteps']
2180    number_of_points = fid.dimensions['number_of_points']
2181    if origin is None:
2182
2183        # Get geo_reference
2184        # sww files don't have to have a geo_ref
2185        try:
2186            geo_reference = Geo_reference(NetCDFObject=fid)
2187        except AttributeError, e:
2188            geo_reference = Geo_reference() #Default georef object
2189
2190        xllcorner = geo_reference.get_xllcorner()
2191        yllcorner = geo_reference.get_yllcorner()
2192        zone = geo_reference.get_zone()
2193    else:
2194        zone = origin[0]
2195        xllcorner = origin[1]
2196        yllcorner = origin[2]
2197
2198
2199
2200    # FIXME: Refactor using code from file_function.statistics
2201    # Something like print swwstats(swwname)
2202    if verbose:
2203        x = fid.variables['x'][:]
2204        y = fid.variables['y'][:]
2205        times = fid.variables['time'][:]
2206        print '------------------------------------------------'
2207        print 'Statistics of SWW file:'
2208        print '  Name: %s' %swwfile
2209        print '  Reference:'
2210        print '    Lower left corner: [%f, %f]'\
2211              %(xllcorner, yllcorner)
2212        print '    Start time: %f' %fid.starttime[0]
2213        print '  Extent:'
2214        print '    x [m] in [%f, %f], len(x) == %d'\
2215              %(min(x.flat), max(x.flat), len(x.flat))
2216        print '    y [m] in [%f, %f], len(y) == %d'\
2217              %(min(y.flat), max(y.flat), len(y.flat))
2218        print '    t [s] in [%f, %f], len(t) == %d'\
2219              %(min(times), max(times), len(times))
2220        print '  Quantities [SI units]:'
2221        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2222            q = fid.variables[name][:].flat
2223            print '    %s in [%f, %f]' %(name, min(q), max(q))
2224
2225
2226
2227    # Get quantity and reduce if applicable
2228    if verbose: print 'Processing quantity %s' %quantity
2229
2230    # Turn NetCDF objects into Numeric arrays
2231    quantity_dict = {}
2232    for name in fid.variables.keys():
2233        quantity_dict[name] = fid.variables[name][:]
2234
2235
2236
2237    # Convert quantity expression to quantities found in sww file   
2238    q = apply_expression_to_dictionary(quantity, quantity_dict)
2239
2240
2241
    if len(q.shape) == 2:
        # q has a time component
        if timestep is not None:
            # Extract values at the given timestep (as per docstring)
            q = q[timestep, :]
        else:
            # Reduce along the temporal dimension
            if verbose: print 'Reducing quantity %s' %quantity
            q_reduced = zeros( number_of_points, Float )

            for k in range(number_of_points):
                q_reduced[k] = reduction( q[:,k] )

            q = q_reduced
2252
2253    # Post condition: Now q has dimension: number_of_points
2254    assert len(q.shape) == 1
2255    assert q.shape[0] == number_of_points
2256
2257
2258    if verbose:
2259        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2260
2261
2262    # Create grid and update xll/yll corner and x,y
2263    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2264    assert len(vertex_points.shape) == 2
2265
2266    # Interpolate
2267    from fit_interpolate.interpolate import Interpolate
2268    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2269
2270    # Interpolate using quantity values
2271    if verbose: print 'Interpolating'
2272    interpolated_values = interp.interpolate(q, data_points).flat
2273
2274
2275    if verbose:
2276        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2277                                                      max(interpolated_values))
2278
2279    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2280    P = interp.mesh.get_boundary_polygon()
2281    outside_indices = outside_polygon(data_points, P, closed=True)
2282
2283    for i in outside_indices:
2284        interpolated_values[i] = NODATA_value
2285
2286
2287    # Store results   
2288    G = Geospatial_data(data_points=data_points,
2289                        attributes=interpolated_values)
2290
2291    G.export_points_file(ptsfile, absolute = True)
2292
2293    fid.close()
2294
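
# Example usage of sww2pts (a minimal sketch; the filename and the point
# coordinates are hypothetical):
#
#     from Numeric import array
#     sww2pts('runup',
#             data_points=array([[1000.0, 2000.0],
#                                [1500.0, 2500.0]]),
#             quantity='elevation', verbose=True)
#
# This interpolates elevation at the two given locations and stores the
# result in runup_elevation.pts.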
2295
2296def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2297                                  use_cache = False,
2298                                  verbose = False):
    """Read Digital Elevation model from the following ASCII format (.asc)
2300
2301    Example:
2302
2303    ncols         3121
2304    nrows         1800
2305    xllcorner     722000
2306    yllcorner     5893000
2307    cellsize      25
2308    NODATA_value  -9999
2309    138.3698 137.4194 136.5062 135.5558 ..........
2310
2311    Convert basename_in + '.asc' to NetCDF format (.dem)
2312    mimicking the ASCII format closely.
2313
2314
2315    An accompanying file with same basename_in but extension .prj must exist
2316    and is used to fix the UTM zone, datum, false northings and eastings.
2317
2318    The prj format is assumed to be as
2319
2320    Projection    UTM
2321    Zone          56
2322    Datum         WGS84
2323    Zunits        NO
2324    Units         METERS
2325    Spheroid      WGS84
2326    Xshift        0.0000000000
2327    Yshift        10000000.0000000000
2328    Parameters
2329    """
2330
2331
2332
2333    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2334
2335    if use_cache is True:
2336        from caching import cache
2337        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2338                       dependencies = [basename_in + '.asc'],
2339                       verbose = verbose)
2340
2341    else:
2342        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2343
2344    return result
2345
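
# Example usage of convert_dem_from_ascii2netcdf (a minimal sketch;
# 'topo' is a hypothetical basename assuming topo.asc and topo.prj exist):
#
#     convert_dem_from_ascii2netcdf('topo', use_cache=True, verbose=True)
#
# This writes topo.dem in the NetCDF DEM format described above. With
# use_cache=True the conversion is only redone when topo.asc changes.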
2346
2347
2348
2349
2350
2351def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2352                                  verbose = False):
    """Read Digital Elevation model from the following ASCII format (.asc)
2354
2355    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2356    """
2357
2358    import os
2359    from Scientific.IO.NetCDF import NetCDFFile
2360    from Numeric import Float, array
2361
2362    #root, ext = os.path.splitext(basename_in)
2363    root = basename_in
2364
2365    ###########################################
2366    # Read Meta data
2367    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2368    metadatafile = open(root + '.prj')
2369    metalines = metadatafile.readlines()
2370    metadatafile.close()
2371
2372    L = metalines[0].strip().split()
2373    assert L[0].strip().lower() == 'projection'
2374    projection = L[1].strip()                   #TEXT
2375
2376    L = metalines[1].strip().split()
2377    assert L[0].strip().lower() == 'zone'
2378    zone = int(L[1].strip())
2379
2380    L = metalines[2].strip().split()
2381    assert L[0].strip().lower() == 'datum'
2382    datum = L[1].strip()                        #TEXT
2383
2384    L = metalines[3].strip().split()
2385    assert L[0].strip().lower() == 'zunits'     #IGNORE
2386    zunits = L[1].strip()                       #TEXT
2387
2388    L = metalines[4].strip().split()
2389    assert L[0].strip().lower() == 'units'
2390    units = L[1].strip()                        #TEXT
2391
2392    L = metalines[5].strip().split()
2393    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2394    spheroid = L[1].strip()                     #TEXT
2395
2396    L = metalines[6].strip().split()
2397    assert L[0].strip().lower() == 'xshift'
2398    false_easting = float(L[1].strip())
2399
2400    L = metalines[7].strip().split()
2401    assert L[0].strip().lower() == 'yshift'
2402    false_northing = float(L[1].strip())
2403
2404    #print false_easting, false_northing, zone, datum
2405
2406
2407    ###########################################
2408    #Read DEM data
2409
2410    datafile = open(basename_in + '.asc')
2411
2412    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2413    lines = datafile.readlines()
2414    datafile.close()
2415
    if verbose: print 'Got %d lines' %len(lines)
2417
2418    ncols = int(lines[0].split()[1].strip())
2419    nrows = int(lines[1].split()[1].strip())
2420    xllcorner = float(lines[2].split()[1].strip())
2421    yllcorner = float(lines[3].split()[1].strip())
2422    cellsize = float(lines[4].split()[1].strip())
2423    NODATA_value = int(lines[5].split()[1].strip())
2424
2425    assert len(lines) == nrows + 6
2426
2427
2428    ##########################################
2429
2430
    if basename_out is None:
2432        netcdfname = root + '.dem'
2433    else:
2434        netcdfname = basename_out + '.dem'
2435
2436    if verbose: print 'Store to NetCDF file %s' %netcdfname
2437    # NetCDF file definition
2438    fid = NetCDFFile(netcdfname, 'w')
2439
2440    #Create new file
2441    fid.institution = 'Geoscience Australia'
2442    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2443                      'of spatial point data'
2444
2445    fid.ncols = ncols
2446    fid.nrows = nrows
2447    fid.xllcorner = xllcorner
2448    fid.yllcorner = yllcorner
2449    fid.cellsize = cellsize
2450    fid.NODATA_value = NODATA_value
2451
2452    fid.zone = zone
2453    fid.false_easting = false_easting
2454    fid.false_northing = false_northing
2455    fid.projection = projection
2456    fid.datum = datum
2457    fid.units = units
2458
2459
2460    # dimension definitions
2461    fid.createDimension('number_of_rows', nrows)
2462    fid.createDimension('number_of_columns', ncols)
2463
2464    # variable definitions
2465    fid.createVariable('elevation', Float, ('number_of_rows',
2466                                            'number_of_columns'))
2467
2468    # Get handles to the variables
2469    elevation = fid.variables['elevation']
2470
2471    #Store data
2472    n = len(lines[6:])
2473    for i, line in enumerate(lines[6:]):
2474        fields = line.split()
2475        if verbose and i%((n+10)/10)==0:
2476            print 'Processing row %d of %d' %(i, nrows)
2477
2478        elevation[i, :] = array([float(x) for x in fields])
2479
2480    fid.close()
2481
2482
2483
2484
2485
2486def ferret2sww(basename_in, basename_out = None,
2487               verbose = False,
2488               minlat = None, maxlat = None,
2489               minlon = None, maxlon = None,
2490               mint = None, maxt = None, mean_stage = 0,
2491               origin = None, zscale = 1,
2492               fail_on_NaN = True,
2493               NaN_filler = 0,
2494               elevation = None,
2495               inverted_bathymetry = False
2496               ): #FIXME: Bathymetry should be obtained
2497                                  #from MOST somehow.
2498                                  #Alternatively from elsewhere
2499                                  #or, as a last resort,
2500                                  #specified here.
2501                                  #The value of -100 will work
2502                                  #for the Wollongong tsunami
2503                                  #scenario but is very hacky
2504    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2505    sww format native to pyvolution.
2506
2507    Specify only basename_in and read files of the form
2508    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2509    relative height, x-velocity and y-velocity, respectively.
2510
2511    Also convert latitude and longitude to UTM. All coordinates are
2512    assumed to be given in the GDA94 datum.
2513
    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees
2517
2518    origin is a 3-tuple with geo referenced
2519    UTM coordinates (zone, easting, northing)
2520
2521    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2522    which means that longitude is the fastest
2523    varying dimension (row major order, so to speak)
2524
2525    ferret2sww uses grid points as vertices in a triangular grid
2526    counting vertices from lower left corner upwards, then right
2527    """
2528
2529    import os
2530    from Scientific.IO.NetCDF import NetCDFFile
2531    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2532    from Numeric import allclose, around
2533
2534    precision = Float
2535
    msg = 'Must use latitudes and longitudes for minlat, maxlat, minlon, maxlon'
2537
    if minlat is not None:
        assert -90 < minlat < 90 , msg
    if maxlat is not None:
        assert -90 < maxlat < 90 , msg
    if minlon is not None:
        assert -180 < minlon < 180 , msg
    if maxlon is not None:
        assert -180 < maxlon < 180 , msg
2546
2547
2548    #Get NetCDF data
2549    if verbose: print 'Reading files %s_*.nc' %basename_in
2550    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2551    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2552    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2553    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2554
2555    if basename_out is None:
2556        swwname = basename_in + '.sww'
2557    else:
2558        swwname = basename_out + '.sww'
2559
    # Get dimensions of file_h
2561    for dimension in file_h.dimensions.keys():
2562        if dimension[:3] == 'LON':
2563            dim_h_longitude = dimension
2564        if dimension[:3] == 'LAT':
2565            dim_h_latitude = dimension
2566        if dimension[:4] == 'TIME':
2567            dim_h_time = dimension
2568
2572
2573    times = file_h.variables[dim_h_time]
2574    latitudes = file_h.variables[dim_h_latitude]
2575    longitudes = file_h.variables[dim_h_longitude]
2576   
    # Get dimensions for file_e
2578    for dimension in file_e.dimensions.keys():
2579        if dimension[:3] == 'LON':
2580            dim_e_longitude = dimension
2581        if dimension[:3] == 'LAT':
2582            dim_e_latitude = dimension
2583        if dimension[:4] == 'TIME':
2584            dim_e_time = dimension
2585
    # Get dimensions for file_u
2587    for dimension in file_u.dimensions.keys():
2588        if dimension[:3] == 'LON':
2589            dim_u_longitude = dimension
2590        if dimension[:3] == 'LAT':
2591            dim_u_latitude = dimension
2592        if dimension[:4] == 'TIME':
2593            dim_u_time = dimension
2594           
    # Get dimensions for file_v
2596    for dimension in file_v.dimensions.keys():
2597        if dimension[:3] == 'LON':
2598            dim_v_longitude = dimension
2599        if dimension[:3] == 'LAT':
2600            dim_v_latitude = dimension
2601        if dimension[:4] == 'TIME':
2602            dim_v_time = dimension
2603
2604
2605    #Precision used by most for lat/lon is 4 or 5 decimals
2606    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2607    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2608
2609    #Check that files are compatible
2610    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2611    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2612    assert allclose(latitudes, e_lat)
2613
2614    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2615    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2616    assert allclose(longitudes, e_lon)
2617
    if mint is None:
        jmin = 0
    else:
        jmin = searchsorted(times, mint)

    if maxt is None:
        jmax = len(times)
    else:
        jmax = searchsorted(times, maxt)

    if minlat is None:
        kmin = 0
    else:
        kmin = searchsorted(latitudes, minlat)

    if maxlat is None:
        kmax = len(latitudes)
    else:
        kmax = searchsorted(latitudes, maxlat)

    if minlon is None:
        lmin = 0
    else:
        lmin = searchsorted(longitudes, minlon)

    if maxlon is None:
        lmax = len(longitudes)
    else:
        lmax = searchsorted(longitudes, maxlon)
2647
2649    times = times[jmin:jmax]
2650    latitudes = latitudes[kmin:kmax]
2651    longitudes = longitudes[lmin:lmax]
2652
2653
2654    if verbose: print 'cropping'
2655    zname = 'ELEVATION'
2656
2657    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2658    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2659    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2660    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2661
2675
2676
2677
2678    #Get missing values
2679    nan_ha = file_h.variables['HA'].missing_value[0]
2680    nan_ua = file_u.variables['UA'].missing_value[0]
2681    nan_va = file_v.variables['VA'].missing_value[0]
2682    if hasattr(file_e.variables[zname],'missing_value'):
2683        nan_e  = file_e.variables[zname].missing_value[0]
2684    else:
2685        nan_e = None
2686
2687    #Cleanup
2688    from Numeric import sometrue
2689
2690    missing = (amplitudes == nan_ha)
2691    if sometrue (missing):
2692        if fail_on_NaN:
2693            msg = 'NetCDFFile %s contains missing values'\
2694                  %(basename_in+'_ha.nc')
2695            raise DataMissingValuesError, msg
2696        else:
2697            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2698
2699    missing = (uspeed == nan_ua)
2700    if sometrue (missing):
2701        if fail_on_NaN:
2702            msg = 'NetCDFFile %s contains missing values'\
2703                  %(basename_in+'_ua.nc')
2704            raise DataMissingValuesError, msg
2705        else:
2706            uspeed = uspeed*(missing==0) + missing*NaN_filler
2707
2708    missing = (vspeed == nan_va)
2709    if sometrue (missing):
2710        if fail_on_NaN:
2711            msg = 'NetCDFFile %s contains missing values'\
2712                  %(basename_in+'_va.nc')
2713            raise DataMissingValuesError, msg
2714        else:
2715            vspeed = vspeed*(missing==0) + missing*NaN_filler
2716
2717
2718    missing = (elevations == nan_e)
2719    if sometrue (missing):
2720        if fail_on_NaN:
2721            msg = 'NetCDFFile %s contains missing values'\
2722                  %(basename_in+'_e.nc')
2723            raise DataMissingValuesError, msg
2724        else:
2725            elevations = elevations*(missing==0) + missing*NaN_filler
2726
2727    #######
2728
2729
2730
2731    number_of_times = times.shape[0]
2732    number_of_latitudes = latitudes.shape[0]
2733    number_of_longitudes = longitudes.shape[0]
2734
2735    assert amplitudes.shape[0] == number_of_times
2736    assert amplitudes.shape[1] == number_of_latitudes
2737    assert amplitudes.shape[2] == number_of_longitudes
2738
2739    if verbose:
2740        print '------------------------------------------------'
2741        print 'Statistics:'
2742        print '  Extent (lat/lon):'
2743        print '    lat in [%f, %f], len(lat) == %d'\
2744              %(min(latitudes.flat), max(latitudes.flat),
2745                len(latitudes.flat))
2746        print '    lon in [%f, %f], len(lon) == %d'\
2747              %(min(longitudes.flat), max(longitudes.flat),
2748                len(longitudes.flat))
2749        print '    t in [%f, %f], len(t) == %d'\
2750              %(min(times.flat), max(times.flat), len(times.flat))
2751
        q = amplitudes.flat
        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = uspeed.flat
        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = vspeed.flat
        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = elevations.flat
        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2767
2768
2769    #print number_of_latitudes, number_of_longitudes
2770    number_of_points = number_of_latitudes*number_of_longitudes
2771    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2772
2773
2774    file_h.close()
2775    file_u.close()
2776    file_v.close()
2777    file_e.close()
2778
2779
2780    # NetCDF file definition
2781    outfile = NetCDFFile(swwname, 'w')
2782
2783    #Create new file
2784    outfile.institution = 'Geoscience Australia'
2785    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2786                          %(basename_in + '_ha.nc',
2787                            basename_in + '_ua.nc',
2788                            basename_in + '_va.nc',
2789                            basename_in + '_e.nc')
2790
2791
2792    #For sww compatibility
2793    outfile.smoothing = 'Yes'
2794    outfile.order = 1
2795
2796    #Start time in seconds since the epoch (midnight 1/1/1970)
2797    outfile.starttime = starttime = times[0]
2798    times = times - starttime  #Store relative times
2799
2800    # dimension definitions
2801    outfile.createDimension('number_of_volumes', number_of_volumes)
2802
2803    outfile.createDimension('number_of_vertices', 3)
2804    outfile.createDimension('number_of_points', number_of_points)
2805
2806
2808    outfile.createDimension('number_of_timesteps', len(times))
2809
2810    # variable definitions
2811    outfile.createVariable('x', precision, ('number_of_points',))
2812    outfile.createVariable('y', precision, ('number_of_points',))
2813    outfile.createVariable('elevation', precision, ('number_of_points',))
2814
2815    #FIXME: Backwards compatibility
2816    outfile.createVariable('z', precision, ('number_of_points',))
2817    #################################
2818
2819    outfile.createVariable('volumes', Int, ('number_of_volumes',
2820                                            'number_of_vertices'))
2821
2822    outfile.createVariable('time', precision,
2823                           ('number_of_timesteps',))
2824
2825    outfile.createVariable('stage', precision,
2826                           ('number_of_timesteps',
2827                            'number_of_points'))
2828
2829    outfile.createVariable('xmomentum', precision,
2830                           ('number_of_timesteps',
2831                            'number_of_points'))
2832
2833    outfile.createVariable('ymomentum', precision,
2834                           ('number_of_timesteps',
2835                            'number_of_points'))
2836
2837
2838    #Store
2839    from coordinate_transforms.redfearn import redfearn
2840    x = zeros(number_of_points, Float)  #Easting
2841    y = zeros(number_of_points, Float)  #Northing
2842
2843
2844    if verbose: print 'Making triangular grid'
2845    #Check zone boundaries
2846    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2847
2848    vertices = {}
2849    i = 0
2850    for k, lat in enumerate(latitudes):       #Y direction
2851        for l, lon in enumerate(longitudes):  #X direction
2852
2853            vertices[l,k] = i
2854
2855            zone, easting, northing = redfearn(lat,lon)
2856
            msg = 'Zone boundary crossed at longitude = %f' %lon
2858            #assert zone == refzone, msg
2859            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2860            x[i] = easting
2861            y[i] = northing
2862            i += 1
2863
2864
2865    #Construct 2 triangles per 'rectangular' element
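    #
    # Vertex layout per grid cell (indices into the vertices dict;
    # k runs along latitude, l along longitude):
    #
    #   v1=(l,k+1)     v3=(l+1,k+1)
    #       +--------------+
    #       | Upper      / |
    #       |          /   |
    #       |        /     |
    #       |      /       |
    #       |    /   Lower |
    #       +--------------+
    #   v2=(l,k)       v4=(l+1,k)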
2866    volumes = []
2867    for l in range(number_of_longitudes-1):    #X direction
2868        for k in range(number_of_latitudes-1): #Y direction
2869            v1 = vertices[l,k+1]
2870            v2 = vertices[l,k]
2871            v3 = vertices[l+1,k+1]
2872            v4 = vertices[l+1,k]
2873
2874            volumes.append([v1,v2,v3]) #Upper element
2875            volumes.append([v4,v3,v2]) #Lower element
2876
2877    volumes = array(volumes)
2878
    if origin is None:
2880        zone = refzone
2881        xllcorner = min(x)
2882        yllcorner = min(y)
2883    else:
2884        zone = origin[0]
2885        xllcorner = origin[1]
2886        yllcorner = origin[2]
2887
2888
2889    outfile.xllcorner = xllcorner
2890    outfile.yllcorner = yllcorner
2891    outfile.zone = zone
2892
2893
2894    if elevation is not None:
2895        z = elevation
2896    else:
2897        if inverted_bathymetry:
2898            z = -1*elevations
2899        else:
2900            z = elevations
2901    #FIXME: z should be obtained from MOST and passed in here
2902
2903    from Numeric import resize
2904    z = resize(z,outfile.variables['z'][:].shape)
2905    outfile.variables['x'][:] = x - xllcorner
2906    outfile.variables['y'][:] = y - yllcorner
2907    outfile.variables['z'][:] = z             #FIXME HACK
2908    outfile.variables['elevation'][:] = z
2909    outfile.variables['time'][:] = times   #Store time relative
2910    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2911
2912
2913
2914    #Time stepping
2915    stage = outfile.variables['stage']
2916    xmomentum = outfile.variables['xmomentum']
2917    ymomentum = outfile.variables['ymomentum']
2918
2919    if verbose: print 'Converting quantities'
2920    n = len(times)
2921    for j in range(n):
2922        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2923        i = 0
2924        for k in range(number_of_latitudes):      #Y direction
2925            for l in range(number_of_longitudes): #X direction
2926                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2927                stage[j,i] = w
2928                h = w - z[i]
2929                xmomentum[j,i] = uspeed[j,k,l]/100*h
2930                ymomentum[j,i] = vspeed[j,k,l]/100*h
2931                i += 1
2932
2933    #outfile.close()
2934
2935    #FIXME: Refactor using code from file_function.statistics
2936    #Something like print swwstats(swwname)
2937    if verbose:
2938        x = outfile.variables['x'][:]
2939        y = outfile.variables['y'][:]
2940        times = outfile.variables['time'][:]
2941        print '------------------------------------------------'
2942        print 'Statistics of output file:'
2943        print '  Name: %s' %swwname
2944        print '  Reference:'
2945        print '    Lower left corner: [%f, %f]'\
2946              %(xllcorner, yllcorner)
2947        print '    Start time: %f' %starttime
2948        print '  Extent:'
2949        print '    x [m] in [%f, %f], len(x) == %d'\
2950              %(min(x.flat), max(x.flat), len(x.flat))
2951        print '    y [m] in [%f, %f], len(y) == %d'\
2952              %(min(y.flat), max(y.flat), len(y.flat))
2953        print '    t [s] in [%f, %f], len(t) == %d'\
2954              %(min(times), max(times), len(times))
2955        print '  Quantities [SI units]:'
2956        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2957            q = outfile.variables[name][:].flat
2958            print '    %s in [%f, %f]' %(name, min(q), max(q))
2959
2960
2961
2962    outfile.close()
2963
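
# Example usage of ferret2sww (a minimal sketch; 'most_output' is a
# hypothetical basename assuming most_output_ha.nc, most_output_ua.nc,
# most_output_va.nc and most_output_e.nc exist):
#
#     ferret2sww('most_output', basename_out='boundary',
#                minlat=-36.0, maxlat=-33.0,
#                minlon=150.0, maxlon=153.0,
#                mean_stage=0, zscale=1, verbose=True)
#
# This crops the MOST data to the given lat/lon window, converts
# coordinates to UTM and writes boundary.sww for use as a boundary
# condition.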
2964
2965
2966
2967
2968def timefile2netcdf(filename, quantity_names = None):
2969    """Template for converting typical text files with time series to
2970    NetCDF tms file.
2971
2972
    The file format is assumed to be two fields separated by a comma:
2974
2975        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
2976
2977    E.g
2978
2979      31/08/04 00:00:00, 1.328223 0 0
2980      31/08/04 00:15:00, 1.292912 0 0
2981
2982    will provide a time dependent function f(t) with three attributes
2983
    filename is assumed to be the rootname with extensions .txt and .tms
2985    """
2986
2987    import time, calendar
2988    from Numeric import array
2989    from config import time_format
2990    from utilities.numerical_tools import ensure_numeric
2991
2992
2993
2994    fid = open(filename + '.txt')
2995    line = fid.readline()
2996    fid.close()
2997
2998    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
3000    assert len(fields) == 2, msg
3001
3002    try:
3003        starttime = calendar.timegm(time.strptime(fields[0], time_format))
3004    except ValueError:
3005        msg = 'First field in file %s must be' %filename
3006        msg += ' date-time with format %s.\n' %time_format
3007        msg += 'I got %s instead.' %fields[0]
3008        raise DataTimeError, msg
3009
3010
3011    #Split values
3012    values = []
3013    for value in fields[1].split():
3014        values.append(float(value))
3015
3016    q = ensure_numeric(values)
3017
3018    msg = 'ERROR: File must contain at least one independent value'
3019    assert len(q.shape) == 1, msg
3020
3021
3022
3023    #Read times proper
3024    from Numeric import zeros, Float, alltrue
3027
3028    fid = open(filename + '.txt')
3029    lines = fid.readlines()
3030    fid.close()
3031
3032    N = len(lines)
3033    d = len(q)
3034
3035    T = zeros(N, Float)       #Time
3036    Q = zeros((N, d), Float)  #Values
3037
3038    for i, line in enumerate(lines):
3039        fields = line.split(',')
3040        realtime = calendar.timegm(time.strptime(fields[0], time_format))
3041
3042        T[i] = realtime - starttime
3043
3044        for j, value in enumerate(fields[1].split()):
3045            Q[i, j] = float(value)
3046
    msg = 'File %s must list time as a monotonically ' %filename
3048    msg += 'increasing sequence'
3049    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3050
3051
3052    #Create NetCDF file
3053    from Scientific.IO.NetCDF import NetCDFFile
3054
3055    fid = NetCDFFile(filename + '.tms', 'w')
3056
3057
3058    fid.institution = 'Geoscience Australia'
3059    fid.description = 'Time series'
3060
3061
3062    #Reference point
3063    #Start time in seconds since the epoch (midnight 1/1/1970)
3064    #FIXME: Use Georef
3065    fid.starttime = starttime
3066
3067
3068    # dimension definitions
3069    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3070    #fid.createDimension('number_of_vertices', 3)
3071
3072
3073    fid.createDimension('number_of_timesteps', len(T))
3074
3075    fid.createVariable('time', Float, ('number_of_timesteps',))
3076
3077    fid.variables['time'][:] = T
3078
3079    for i in range(Q.shape[1]):
3080        try:
3081            name = quantity_names[i]
        except (TypeError, IndexError):  #quantity_names is None or too short
3083            name = 'Attribute%d'%i
3084
3085        fid.createVariable(name, Float, ('number_of_timesteps',))
3086        fid.variables[name][:] = Q[:,i]
3087
3088    fid.close()
3089
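
# Example usage of timefile2netcdf (a minimal sketch; 'gauge' is a
# hypothetical rootname assuming gauge.txt holds lines such as
# '31/08/04 00:00:00, 1.328223 0 0'):
#
#     timefile2netcdf('gauge', quantity_names=['stage',
#                                              'xmomentum',
#                                              'ymomentum'])
#
# This writes gauge.tms with a 'time' variable (relative to the first
# timestamp) and one variable per named attribute.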
3090
3091def extent_sww(file_name):
3092    """
3093    Read in an sww file.
3094
    Input:
3096    file_name - the sww file
3097
    Output:
3099    z - Vector of bed elevation
3100    volumes - Array.  Each row has 3 values, representing
3101    the vertices that define the volume
3102    time - Vector of the times where there is stage information
3103    stage - array with respect to time and vertices (x,y)
3104    """
3105
3106
3107    from Scientific.IO.NetCDF import NetCDFFile
3108
3109    #Check contents
3110    #Get NetCDF
3111    fid = NetCDFFile(file_name, 'r')
3112
3113    # Get the variables
3114    x = fid.variables['x'][:]
3115    y = fid.variables['y'][:]
3116    stage = fid.variables['stage'][:]
3121
3122    fid.close()
3123
3124    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3125
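
# Example usage of extent_sww (a minimal sketch; the filename is
# hypothetical):
#
#     xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('runup.sww')
#
# The six values are the x and y extent of the mesh followed by the
# minimum and maximum stage over all vertices and timesteps.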
3126
def sww2domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
3130    """
3131    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3132
3133    Boundary is not recommended if domain.smooth is not selected, as it
3134    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
3136    give a different final boundary, or crash.
3137    """
    NaN = 9.969209968386869e+036
    #Initialise default NaN (a typical NetCDF missing_value);
    #may be overridden per quantity below
3140
3141    from Scientific.IO.NetCDF import NetCDFFile
3142    from shallow_water import Domain
3143    from Numeric import asarray, transpose, resize
3144
3145    if verbose: print 'Reading from ', filename
3146    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3147    time = fid.variables['time']       #Timesteps
3148    if t is None:
3149        t = time[-1]
3150    time_interp = get_time_interp(time,t)
3151
3152    # Get the variables as Numeric arrays
3153    x = fid.variables['x'][:]             #x-coordinates of vertices
3154    y = fid.variables['y'][:]             #y-coordinates of vertices
3155    elevation = fid.variables['elevation']     #Elevation
3156    stage = fid.variables['stage']     #Water level
3157    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3158    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3159
3160    starttime = fid.starttime[0]
3161    volumes = fid.variables['volumes'][:] #Connectivity
3162    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3163
3164    conserved_quantities = []
3165    interpolated_quantities = {}
3166    other_quantities = []
3167
3168    # get geo_reference
3169    #sww files don't have to have a geo_ref
3170    try:
3171        geo_reference = Geo_reference(NetCDFObject=fid)
3172    except: #AttributeError, e:
3173        geo_reference = None
3174
3175    if verbose: print '    getting quantities'
3176    for quantity in fid.variables.keys():
3177        dimensions = fid.variables[quantity].dimensions
3178        if 'number_of_timesteps' in dimensions:
3179            conserved_quantities.append(quantity)
3180            interpolated_quantities[quantity]=\
3181                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3182        else: other_quantities.append(quantity)
3183
3184    other_quantities.remove('x')
3185    other_quantities.remove('y')
3186    other_quantities.remove('z')
3187    other_quantities.remove('volumes')
3188
3189    conserved_quantities.remove('time')
3190
3191    if verbose: print '    building domain'
3192    #    From domain.Domain:
3193    #    domain = Domain(coordinates, volumes,\
3194    #                    conserved_quantities = conserved_quantities,\
3195    #                    other_quantities = other_quantities,zone=zone,\
3196    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3197
3198    #   From shallow_water.Domain:
3199    coordinates=coordinates.tolist()
3200    volumes=volumes.tolist()
3201    #FIXME:should this be in mesh?(peter row)
3202    if fid.smoothing == 'Yes': unique = False
3203    else: unique = True
3204    if unique:
3205        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3206
3207
3208    try:
3209        domain = Domain(coordinates, volumes, boundary)
3210    except AssertionError, e:
3211        fid.close()
3212        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3213        raise DataDomainError, msg
3214
3215    if not boundary is None:
3216        domain.boundary = boundary
3217
3218    domain.geo_reference = geo_reference
3219
3220    domain.starttime=float(starttime)+float(t)
3221    domain.time=0.0
3222
3223    for quantity in other_quantities:
3224        try:
3225            NaN = fid.variables[quantity].missing_value
        except AttributeError:
3227            pass #quantity has no missing_value number
3228        X = fid.variables[quantity][:]
3229        if very_verbose:
3230            print '       ',quantity
3231            print '        NaN =',NaN
3232            print '        max(X)'
3233            print '       ',max(X)
3234            print '        max(X)==NaN'
3235            print '       ',max(X)==NaN
3236            print ''
3237        if (max(X)==NaN) or (min(X)==NaN):
3238            if fail_if_NaN:
3239                msg = 'quantity "%s" contains no_data entry'%quantity
3240                raise DataMissingValuesError, msg
3241            else:
                data = (X != NaN)
3243                X = (X*data)+(data==0)*NaN_filler
3244        if unique:
3245            X = resize(X,(len(X)/3,3))
3246        domain.set_quantity(quantity,X)
3247    #
3248    for quantity in conserved_quantities:
3249        try:
3250            NaN = fid.variables[quantity].missing_value
        except AttributeError:
3252            pass #quantity has no missing_value number
3253        X = interpolated_quantities[quantity]
3254        if very_verbose:
3255            print '       ',quantity
3256            print '        NaN =',NaN
3257            print '        max(X)'
3258            print '       ',max(X)
3259            print '        max(X)==NaN'
3260            print '       ',max(X)==NaN
3261            print ''
3262        if (max(X)==NaN) or (min(X)==NaN):
3263            if fail_if_NaN:
3264                msg = 'quantity "%s" contains no_data entry'%quantity
3265                raise DataMissingValuesError, msg
3266            else:
                data = (X != NaN)
3268                X = (X*data)+(data==0)*NaN_filler
3269        if unique:
3270            X = resize(X,(X.shape[0]/3,3))
3271        domain.set_quantity(quantity,X)
3272
3273    fid.close()
3274    return domain
3275
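
# Example usage of sww2domain (a minimal sketch; the filename is
# hypothetical and t=300.0 is assumed to lie within the file's time
# range):
#
#     domain = sww2domain('runup.sww', t=300.0, verbose=True,
#                         fail_if_NaN=False, NaN_filler=0)
#
# This rebuilds a shallow_water Domain from the saved mesh and sets its
# quantities to the values interpolated at t=300 s on the file's own
# time axis; domain.starttime is advanced accordingly.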
3276def interpolated_quantity(saved_quantity,time_interp):
3277
3278    #given an index and ratio, interpolate quantity with respect to time.
3279    index,ratio = time_interp
3280    Q = saved_quantity
3281    if ratio > 0:
3282        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3283    else:
3284        q = Q[index]
3285    #Return vector of interpolated values
3286    return q
3287
3288def get_time_interp(time,t=None):
3289    #Finds the ratio and index for time interpolation.
3290    #It is borrowed from previous pyvolution code.
3291    if t is None:
3292        t=time[-1]
3293        index = -1
3294        ratio = 0.
3295    else:
3296        T = time
3297        tau = t
3298        index=0
3299        msg = 'Time interval derived from file %s [%s:%s]'\
3300            %('FIXMEfilename', T[0], T[-1])
3301        msg += ' does not match model time: %s' %tau
3302        if tau < time[0]: raise DataTimeError, msg
3303        if tau > time[-1]: raise DataTimeError, msg
3304        while tau > time[index]: index += 1
3305        while tau < time[index]: index -= 1
3306        if tau == time[index]:
3307            #Protect against case where tau == time[-1] (last time)
3308            # - also works in general when tau == time[i]
3309            ratio = 0
3310        else:
3311            #t is now between index and index+1
3312            ratio = (tau - time[index])/(time[index+1] - time[index])
3313    return (index,ratio)
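
# Worked example for the two helpers above (values chosen for
# illustration):
#
#     index, ratio = get_time_interp([0., 10., 20.], 15.)   # -> (1, 0.5)
#
# interpolated_quantity then yields 0.5*Q[1] + 0.5*Q[2] for a saved
# quantity Q.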
3314
3315
def weed(coordinates, volumes, boundary=None):
    #Numeric arrays report their type name as 'array'; convert to lists
    if type(coordinates).__name__ == 'array':
        coordinates = coordinates.tolist()
    if type(volumes).__name__ == 'array':
        volumes = volumes.tolist()
3321
3322    unique = False
3323    point_dict = {}
3324    same_point = {}
3325    for i in range(len(coordinates)):
3326        point = tuple(coordinates[i])
3327        if point_dict.has_key(point):
3328            unique = True
3329            same_point[i]=point
3330            #to change all point i references to point j
3331        else:
3332            point_dict[point]=i
3333            same_point[i]=point
3334
3335    coordinates = []
3336    i = 0
3337    for point in point_dict.keys():
3338        point = tuple(point)
3339        coordinates.append(list(point))
3340        point_dict[point]=i
3341        i+=1
3342
3343
3344    for volume in volumes:
3345        for i in range(len(volume)):
3346            index = volume[i]
3347            if index>-1:
3348                volume[i]=point_dict[same_point[index]]
3349
3350    new_boundary = {}
3351    if not boundary is None:
3352        for segment in boundary.keys():
3353            point0 = point_dict[same_point[segment[0]]]
3354            point1 = point_dict[same_point[segment[1]]]
3355            label = boundary[segment]
3356            #FIXME should the bounday attributes be concaterated
3357            #('exterior, pond') or replaced ('pond')(peter row)
3358
3359            if new_boundary.has_key((point0,point1)):
3360                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3361                                              #+','+label
3362
3363            elif new_boundary.has_key((point1,point0)):
3364                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3365                                              #+','+label
3366            else: new_boundary[(point0,point1)]=label
3367
3368        boundary = new_boundary
3369
3370    return coordinates,volumes,boundary
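
# A minimal sketch (hypothetical mesh): the second and fourth points coincide,
# so weed() keeps three unique points and renumbers both triangles to share
# the surviving index.
#
#     coords = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]
#     vols = [[0, 1, 2], [0, 3, 2]]
#     coords, vols, bnd = weed(coords, vols)
#     # len(coords) == 3; vols now only references the three unique points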
3371
3372
3373def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3374                 verbose=False):
    """Read Digital Elevation Model from the following NetCDF format (.dem)
3376
3377    Example:
3378
3379    ncols         3121
3380    nrows         1800
3381    xllcorner     722000
3382    yllcorner     5893000
3383    cellsize      25
3384    NODATA_value  -9999
3385    138.3698 137.4194 136.5062 135.5558 ..........
3386
3387    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3388    """
3389
3390    import os
3391    from Scientific.IO.NetCDF import NetCDFFile
3392    from Numeric import Float, zeros, sum, reshape, equal
3393
3394    root = basename_in
3395    inname = root + '.dem'
3396
3397    #Open existing netcdf file to read
3398    infile = NetCDFFile(inname, 'r')
3399    if verbose: print 'Reading DEM from %s' %inname
3400
3401    #Read metadata
3402    ncols = infile.ncols[0]
3403    nrows = infile.nrows[0]
3404    xllcorner = infile.xllcorner[0]
3405    yllcorner = infile.yllcorner[0]
3406    cellsize = infile.cellsize[0]
3407    NODATA_value = infile.NODATA_value[0]
3408    zone = infile.zone[0]
3409    false_easting = infile.false_easting[0]
3410    false_northing = infile.false_northing[0]
3411    projection = infile.projection
3412    datum = infile.datum
3413    units = infile.units
3414
3415    dem_elevation = infile.variables['elevation']
3416
3417    #Get output file name
    if basename_out is None:
3419        outname = root + '_' + repr(cellsize_new) + '.dem'
3420    else:
3421        outname = basename_out + '.dem'
3422
3423    if verbose: print 'Write decimated NetCDF file to %s' %outname
3424
3425    #Determine some dimensions for decimated grid
3426    (nrows_stencil, ncols_stencil) = stencil.shape
3427    x_offset = ncols_stencil / 2
3428    y_offset = nrows_stencil / 2
3429    cellsize_ratio = int(cellsize_new / cellsize)
3430    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3431    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3432
3433    #Open netcdf file for output
3434    outfile = NetCDFFile(outname, 'w')
3435
3436    #Create new file
3437    outfile.institution = 'Geoscience Australia'
3438    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3439                           'of spatial point data'
3440    #Georeferencing
3441    outfile.zone = zone
3442    outfile.projection = projection
3443    outfile.datum = datum
3444    outfile.units = units
3445
3446    outfile.cellsize = cellsize_new
3447    outfile.NODATA_value = NODATA_value
3448    outfile.false_easting = false_easting
3449    outfile.false_northing = false_northing
3450
3451    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3452    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3453    outfile.ncols = ncols_new
3454    outfile.nrows = nrows_new
3455
3456    # dimension definition
3457    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3458
3459    # variable definition
3460    outfile.createVariable('elevation', Float, ('number_of_points',))
3461
3462    # Get handle to the variable
3463    elevation = outfile.variables['elevation']
3464
3465    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3466
3467    #Store data
3468    global_index = 0
3469    for i in range(nrows_new):
3470        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3471        lower_index = global_index
3472        telev =  zeros(ncols_new, Float)
3473        local_index = 0
3474        trow = i * cellsize_ratio
3475
3476        for j in range(ncols_new):
3477            tcol = j * cellsize_ratio
3478            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3479
3480            #if dem contains 1 or more NODATA_values set value in
3481            #decimated dem to NODATA_value, else compute decimated
3482            #value using stencil
3483            if sum(sum(equal(tmp, NODATA_value))) > 0:
3484                telev[local_index] = NODATA_value
3485            else:
3486                telev[local_index] = sum(sum(tmp * stencil))
3487
3488            global_index += 1
3489            local_index += 1
3490
3491        upper_index = global_index
3492
3493        elevation[lower_index:upper_index] = telev
3494
3495    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3496
3497    infile.close()
3498    outfile.close()
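
# A minimal sketch (hypothetical file name): decimate a 25m DEM to 100m cells
# using a 4x4 averaging stencil. Each decimated value is sum(sum(tmp*stencil)),
# so an averaging stencil must sum to 1.
#
#     from Numeric import ones, Float
#     stencil = ones((4, 4), Float)/16.
#     decimate_dem('topography', stencil, cellsize_new=100)
#     # writes topography_100.dem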
3499
3500
3501
3502
def tsh2sww(filename, verbose=False): #test_tsh2sww
    """Convert a tsh/msh mesh file to an sww file.

    Mainly used as a quick check that a mesh file 'looks' good.
    """

    from shallow_water import Domain
    from pmesh2domain import pmesh_to_domain_instance
    import time, os
    from data_manager import get_dataobject
    from os import sep, path
    from utilities.numerical_tools import mean

    if verbose: print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose: print "Number of triangles = ", len(domain)

    domain.smooth = True
    domain.format = 'sww'   #Native netcdf visualisation format
    file_path, filename = path.split(filename)
    filename, ext = path.splitext(filename)
    domain.filename = filename
    domain.reduction = mean
    if verbose: print "file_path", file_path
    if file_path == "": file_path = "."
    domain.set_datadir(file_path)

    if verbose:
        print "Output written to " + domain.get_datadir() + sep + \
              domain.filename + "." + domain.format
    sww = get_dataobject(domain)
    sww.store_connectivity()
    sww.store_timestep('stage')
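
# A minimal sketch (hypothetical file name):
#
#     tsh2sww('my_mesh.tsh', verbose=True)
#
# writes my_mesh.sww in the same directory as the input mesh.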
3535
3536
3537def asc_csiro2sww(bath_dir,
3538                  elevation_dir,
3539                  ucur_dir,
3540                  vcur_dir,
3541                  sww_file,
3542                  minlat = None, maxlat = None,
3543                  minlon = None, maxlon = None,
3544                  zscale=1,
3545                  mean_stage = 0,
3546                  fail_on_NaN = True,
3547                  elevation_NaN_filler = 0,
3548                  bath_prefix='ba',
3549                  elevation_prefix='el',
3550                  verbose=False):
    """
    Produce an sww boundary file from ESRI ASCII data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    Four types of information are read, each from its own directory:
    bathymetry, elevation, u velocity and v velocity.

    Assumptions:
    All files are in ESRI ASCII format.
    The metadata of all the files is the same.
    Each type is in a separate directory.
    There is one bath file, with extension .000.
    The time period is less than 24hrs and uniform.
    """
3572    from Scientific.IO.NetCDF import NetCDFFile
3573
3574    from coordinate_transforms.redfearn import redfearn
3575
    precision = Float # So if we want to change the precision it's done here

    # go into the bath dir and load the only file
3579    bath_files = os.listdir(bath_dir)
3580    #print "bath_files",bath_files
3581
3582    #fixme: make more general?
3583    bath_file = bath_files[0]
3584    bath_dir_file =  bath_dir + os.sep + bath_file
3585    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3586    #print "bath_metadata",bath_metadata
3587    #print "bath_grid",bath_grid
3588
3589    #Use the date.time of the bath file as a basis for
3590    #the start time for other files
3591    base_start = bath_file[-12:]
3592
3593    #go into the elevation dir and load the 000 file
3594    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3595                         + base_start
3596    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
3597    #print "elevation_dir_file",elevation_dir_file
3598    #print "elevation_grid", elevation_grid
3599
3600    elevation_files = os.listdir(elevation_dir)
3601    ucur_files = os.listdir(ucur_dir)
3602    vcur_files = os.listdir(vcur_dir)
3603
3604    # the first elevation file should be the
3605    # file with the same base name as the bath data
3606    #print "elevation_files[0]",elevation_files[0]
3607    #print "'el' + base_start",'el' + base_start
    assert elevation_files[0] == elevation_prefix + base_start
3609
3610    #assert bath_metadata == elevation_metadata
3611
3612
3613
3614    number_of_latitudes = bath_grid.shape[0]
3615    number_of_longitudes = bath_grid.shape[1]
3616    #number_of_times = len(os.listdir(elevation_dir))
3617    #number_of_points = number_of_latitudes*number_of_longitudes
3618    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3619
3620    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3621                  for x in range(number_of_longitudes)]
3622    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3623                 for y in range(number_of_latitudes)]
3624
    # reverse order of lat, so the first lat represents the first grid row
    latitudes.reverse()
3627
3628    #print "latitudes - before _get_min_max_indexes",latitudes
3629    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
3630                                                 minlat=minlat, maxlat=maxlat,
3631                                                 minlon=minlon, maxlon=maxlon)
3632
3633
3634    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3635    #print "bath_grid",bath_grid
3636    latitudes = latitudes[kmin:kmax]
3637    longitudes = longitudes[lmin:lmax]
3638    number_of_latitudes = len(latitudes)
3639    number_of_longitudes = len(longitudes)
3640    number_of_times = len(os.listdir(elevation_dir))
3641    number_of_points = number_of_latitudes*number_of_longitudes
3642    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3643    #print "number_of_points",number_of_points
3644
3645    #Work out the times
3646    if len(elevation_files) > 1:
3647        # Assume: The time period is less than 24hrs.
3648        time_period = (int(elevation_files[1][-3:]) - \
3649                      int(elevation_files[0][-3:]))*60*60
3650        times = [x*time_period for x in range(len(elevation_files))]
3651    else:
3652        times = [0.0]
3653    #print "times", times
3654    #print "number_of_latitudes", number_of_latitudes
3655    #print "number_of_longitudes", number_of_longitudes
3656    #print "number_of_times", number_of_times
3657    #print "latitudes", latitudes
3658    #print "longitudes", longitudes
3659
3660
3661    if verbose:
3662        print '------------------------------------------------'
3663        print 'Statistics:'
3664        print '  Extent (lat/lon):'
3665        print '    lat in [%f, %f], len(lat) == %d'\
3666              %(min(latitudes), max(latitudes),
3667                len(latitudes))
3668        print '    lon in [%f, %f], len(lon) == %d'\
3669              %(min(longitudes), max(longitudes),
3670                len(longitudes))
3671        print '    t in [%f, %f], len(t) == %d'\
3672              %(min(times), max(times), len(times))
3673
3674    ######### WRITE THE SWW FILE #############
3675    # NetCDF file definition
3676    outfile = NetCDFFile(sww_file, 'w')
3677
3678    #Create new file
3679    outfile.institution = 'Geoscience Australia'
3680    outfile.description = 'Converted from XXX'
3681
3682
3683    #For sww compatibility
3684    outfile.smoothing = 'Yes'
3685    outfile.order = 1
3686
3687    #Start time in seconds since the epoch (midnight 1/1/1970)
3688    outfile.starttime = starttime = times[0]
3689
3690
3691    # dimension definitions
3692    outfile.createDimension('number_of_volumes', number_of_volumes)
3693
3694    outfile.createDimension('number_of_vertices', 3)
3695    outfile.createDimension('number_of_points', number_of_points)
3696    outfile.createDimension('number_of_timesteps', number_of_times)
3697
3698    # variable definitions
3699    outfile.createVariable('x', precision, ('number_of_points',))
3700    outfile.createVariable('y', precision, ('number_of_points',))
3701    outfile.createVariable('elevation', precision, ('number_of_points',))
3702
3703    #FIXME: Backwards compatibility
3704    outfile.createVariable('z', precision, ('number_of_points',))
3705    #################################
3706
3707    outfile.createVariable('volumes', Int, ('number_of_volumes',
3708                                            'number_of_vertices'))
3709
3710    outfile.createVariable('time', precision,
3711                           ('number_of_timesteps',))
3712
3713    outfile.createVariable('stage', precision,
3714                           ('number_of_timesteps',
3715                            'number_of_points'))
3716
3717    outfile.createVariable('xmomentum', precision,
3718                           ('number_of_timesteps',
3719                            'number_of_points'))
3720
3721    outfile.createVariable('ymomentum', precision,
3722                           ('number_of_timesteps',
3723                            'number_of_points'))
3724
3725    #Store
3727    x = zeros(number_of_points, Float)  #Easting
3728    y = zeros(number_of_points, Float)  #Northing
3729
3730    if verbose: print 'Making triangular grid'
3731    #Get zone of 1st point.
3732    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3733
3734    vertices = {}
3735    i = 0
3736    for k, lat in enumerate(latitudes):
3737        for l, lon in enumerate(longitudes):
3738
3739            vertices[l,k] = i
3740
3741            zone, easting, northing = redfearn(lat,lon)
3742
            msg = 'Zone boundary crossed at longitude = %s' %lon
            #assert zone == refzone, msg
3745            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3746            x[i] = easting
3747            y[i] = northing
3748            i += 1
3749
3750
3751    #Construct 2 triangles per 'rectangular' element
3752    volumes = []
3753    for l in range(number_of_longitudes-1):    #X direction
3754        for k in range(number_of_latitudes-1): #Y direction
3755            v1 = vertices[l,k+1]
3756            v2 = vertices[l,k]
3757            v3 = vertices[l+1,k+1]
3758            v4 = vertices[l+1,k]
3759
            #Note, this is different to the ferret2sww code
            #since the order of the lats is reversed.
3762            volumes.append([v1,v3,v2]) #Upper element
3763            volumes.append([v4,v2,v3]) #Lower element
3764
3765    volumes = array(volumes)
3766
3767    geo_ref = Geo_reference(refzone,min(x),min(y))
3768    geo_ref.write_NetCDF(outfile)
3769
3770    # This will put the geo ref in the middle
    #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2.0, (max(y)+min(y))/2.0)
3772
3773
3774    if verbose:
3775        print '------------------------------------------------'
3776        print 'More Statistics:'
        print '  Extent (UTM):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x), len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y), len(y))
        print 'geo_ref: ', geo_ref
3785
3786    z = resize(bath_grid,outfile.variables['z'][:].shape)
3787    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3788    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3789    outfile.variables['z'][:] = z
3790    outfile.variables['elevation'][:] = z  #FIXME HACK
3791    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3792
3793    # do this to create an ok sww file.
3794    #outfile.variables['time'][:] = [0]   #Store time relative
3795    #outfile.variables['stage'] = z
3796    # put the next line up in the code after outfile.order = 1
3797    #number_of_times = 1
3798
3799    stage = outfile.variables['stage']
3800    xmomentum = outfile.variables['xmomentum']
3801    ymomentum = outfile.variables['ymomentum']
3802
3803    outfile.variables['time'][:] = times   #Store time relative
3804
3805    if verbose: print 'Converting quantities'
3806    n = number_of_times
3807    for j in range(number_of_times):
3808        # load in files
3809        elevation_meta, elevation_grid = \
3810           _read_asc(elevation_dir + os.sep + elevation_files[j])
3811
3812        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3813        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3814
3815        #print "elevation_grid",elevation_grid
3816        #cut matrix to desired size
3817        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3818        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3819        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3820        #print "elevation_grid",elevation_grid
3821        # handle missing values
3822        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue(missing):
3824            if fail_on_NaN:
3825                msg = 'File %s contains missing values'\
3826                      %(elevation_files[j])
3827                raise DataMissingValuesError, msg
3828            else:
3829                elevation_grid = elevation_grid*(missing==0) + \
3830                                 missing*elevation_NaN_filler
3831
3832
3833        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3834        i = 0
3835        for k in range(number_of_latitudes):      #Y direction
3836            for l in range(number_of_longitudes): #X direction
3837                w = zscale*elevation_grid[k,l] + mean_stage
3838                stage[j,i] = w
3839                h = w - z[i]
3840                xmomentum[j,i] = u_momentum_grid[k,l]*h
3841                ymomentum[j,i] = v_momentum_grid[k,l]*h
3842                i += 1
3843    outfile.close()
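
# A minimal sketch (hypothetical directory and file names): convert CSIRO
# ESRI ASCII grids to a boundary sww, clipped to a lat/lon window.
#
#     asc_csiro2sww('bathymetry_dir', 'elevation_dir', 'ucur_dir', 'vcur_dir',
#                   'boundary.sww',
#                   minlat=-38.0, maxlat=-35.0,
#                   minlon=148.0, maxlon=151.0,
#                   mean_stage=0.0, verbose=True)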
3844
def _get_min_max_indexes(latitudes, longitudes,
                         minlat=None, maxlat=None,
                         minlon=None, maxlon=None):
    """
    Return min and max indexes (for slicing) of the lat and long arrays,
    just covering the area specified with min/max lat/long.

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assume latitudes & longitudes are sorted:
    long - from low to high (west to east, eg 148 - 151)
    lat - from high to low (north to south, eg -35 - -38)
    """
3861
    # reverse order of lat, so it's in ascending order
    latitudes.reverse()
    largest_lat_index = len(latitudes)-1
    #Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0

    if maxlat is None:
        lat_max_index = largest_lat_index #len(latitudes)
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)

    #Take into account that the latitude list was reversed
    latitudes.reverse() # The list was reversed in place; restore caller's order
    lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
                                   largest_lat_index - lat_min_index
    lat_max_index = lat_max_index + 1 # taking into account how slicing works
    lon_max_index = lon_max_index + 1 # taking into account how slicing works

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3901
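# A minimal sketch (hypothetical values): latitudes descend, longitudes ascend,
# and the returned indexes slice out just enough to cover the window.
#
#     lats = [-35.0, -36.0, -37.0, -38.0]
#     lons = [148.0, 149.0, 150.0, 151.0]
#     kmin, kmax, lmin, lmax = _get_min_max_indexes(lats, lons,
#                                                   minlat=-36.5, maxlat=-35.5,
#                                                   minlon=149.5, maxlon=150.5)
#     # -> (0, 3, 1, 4), i.e. lats[0:3] and lons[1:4] cover the window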
3902
3903def _read_asc(filename, verbose=False):
    """Read an ESRI grid file in the following ASCII format (.asc)
3905
3906    Example:
3907
3908    ncols         3121
3909    nrows         1800
3910    xllcorner     722000
3911    yllcorner     5893000
3912    cellsize      25
3913    NODATA_value  -9999
3914    138.3698 137.4194 136.5062 135.5558 ..........
3915
3916    """
3917
3918    datafile = open(filename)
3919
3920    if verbose: print 'Reading DEM from %s' %(filename)
3921    lines = datafile.readlines()
3922    datafile.close()
3923
    if verbose: print 'Got', len(lines), 'lines'
3925
3926    ncols = int(lines.pop(0).split()[1].strip())
3927    nrows = int(lines.pop(0).split()[1].strip())
3928    xllcorner = float(lines.pop(0).split()[1].strip())
3929    yllcorner = float(lines.pop(0).split()[1].strip())
3930    cellsize = float(lines.pop(0).split()[1].strip())
3931    NODATA_value = float(lines.pop(0).split()[1].strip())
3932
3933    assert len(lines) == nrows
3934
3935    #Store data
3936    grid = []
3937
3938    n = len(lines)
3939    for i, line in enumerate(lines):
3940        cells = line.split()
3941        assert len(cells) == ncols
3942        grid.append(array([float(x) for x in cells]))
3943    grid = array(grid)
3944
3945    return {'xllcorner':xllcorner,
3946            'yllcorner':yllcorner,
3947            'cellsize':cellsize,
3948            'NODATA_value':NODATA_value}, grid
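
# A minimal sketch (hypothetical file name):
#
#     metadata, grid = _read_asc('bathymetry_dir/ba.000')
#     # metadata holds 'xllcorner', 'yllcorner', 'cellsize', 'NODATA_value'
#     # grid.shape == (nrows, ncols), with row 0 the northernmost row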
3949
3950
3951# FIXME (Ole): Is this doing anything at all?
3952def sww2timeseries(swwfile,
3953                   gauge_filename,
3954                   gauge_data_outname,
3955                   quantity = None,
3956                   time_min = None,
3957                   time_max = None,
3958                   verbose = False):
3959
3960    """Read SWW file and extract time series for prescribed quantities at
3961    gauge locations.
3962
3963    The gauge locations are defined in gauge_filename. This file should be
3964    in the form: gaugename, easting, northing, and should be stored in a
3965    .csv or .xya file.
3966
3967    Time series data at the gauges can be written to file (for example,
3968    Benfield requested raw data) with the default being no data written.
3969
3970    The parameter quantity must be the name of an existing quantity or
3971    an expression involving existing quantities. The default is
3972    'depth'.
3973
    The user can define a list of quantities. The possibilities are
    the conserved quantities of the shallow water wave equation and other
    quantities which can be derived from those, i.e.
    ['depth', 'xmomentum', 'ymomentum', 'momentum', 'velocity', 'bearing'].

    Momentum is the absolute momentum, sqrt(xmomentum^2 + ymomentum^2).
    Note, units of momentum are m^2/s and depth is m.

    Velocity is absolute momentum divided by depth. (Confirming correct units:
    vel = abs mom / depth = (m^2/s)/m = m/s.)

    Bearing returns the angle of the velocity vector from North.

    If time_min and time_max are given, output is plotted in that time range.
    The default is to plot the entire time range evident in the sww file.

    The exported graphic format is 'png' and the plot will be stored in the
    same directory as the input file.
    """
3993
3994    if quantity is None: quantity = 'depth'
3995
3996    # extract gauge locations from gauge file
3997
    # extract all quantities from swwfile (f = file_function)
    #FIXME: f (file_function) is never created, so the lines below are
    #commented out until this function is actually implemented.
    #if time_min is None: time_min = min(f.get_time())
    #if time_max is None: time_max = max(f.get_time())
4001
4002    # loop through appropriate range of time
4003    # plot prescribed quantities and export data if requested
4004
4005    #if gauge_data_outname is None:
4006
4007    return
4008
4009#-------------------------------------------------------------
4010if __name__ == "__main__":
4011    pass
4012