source: inundation/pyvolution/data_manager.py @ 2950

Last change on this file since 2950 was 2931, checked in by ole, 19 years ago

Fixed open test file

File size: 113.6 KB
Line 
1"""datamanager.py - input output for AnuGA
2
3
This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
.geo: Houdini's ASCII geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
NC -> SWW: Conversion of MOST boundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
TSH + Boundary SWW -> SWW: Simulation using pyvolution
48
49"""
50
import exceptions

# Exception hierarchy for the data manager.  All subclass the standard
# Exception (via the old Python 2 'exceptions' module) so callers can
# catch them individually or as plain Exception.
class DataMissingValuesError(exceptions.Exception): pass  # missing/NaN values in input data
class DataFileNotOpenError(exceptions.Exception): pass    # data file could not be opened (see store_timestep)
class DataTimeError(exceptions.Exception): pass           # time could not be determined (see dat2obj)
class DataDomainError(exceptions.Exception): pass         # domain-related inconsistency
56
57
58
59import os
60
61from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
62     searchsorted, zeros, allclose, around
63
64
65from coordinate_transforms.geo_reference import Geo_reference
66
67
def make_filename(s):
    """Transform argument string into a suitable filename.

    Strips surrounding whitespace, turns spaces into underscores,
    removes parentheses and collapses double underscores (one pass).
    """
    name = s.strip()
    for old, new in ((' ', '_'), ('(', ''), (')', ''), ('__', '_')):
        name = name.replace(old, new)
    return name
79
80
def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       checkdir(path, verbose):

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    On failure to create the directory, falls back to '/tmp/' (unix)
    or 'C:' (windows) and returns that instead.
    """

    import os, sys
    import os.path

    # Crude platform test (sys.platform is 'win32' on Windows)
    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1

    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path)  # Expand ~ or ~user in pathname

    # BUG FIX: the old test used os.access(path, os.R_OK and os.W_OK),
    # which evaluates to os.access(path, os.W_OK) because 'and' returns
    # its second operand.  Combine the mode flags with bitwise OR to
    # actually check both read and write permission.
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            os.mkdir(path)

            # Change access rights if possible
            if unix:
                os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print('MESSAGE: Directory %s created.' % path)

        except:
            # Creation failed (permissions, missing parent, ...);
            # fall back to a directory that should always exist.
            print('WARNING: Directory %s could not be created.' % path)
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print('Using directory %s instead' % path)

    return(path)
133
134
135
def del_dir(path):
    """Recursively delete directory *path* and all its contents.

    Files that cannot be removed are reported and skipped; directory
    symlinks are removed as entries rather than followed.
    """

    import os

    if not os.path.isdir(path):
        return

    for entry in os.listdir(path):
        full = os.path.join(path, entry)

        if os.path.isdir(full) and not os.path.islink(full):
            # Real subdirectory: recurse into it first
            del_dir(full)
        else:
            try:
                os.remove(full)
            except:
                print('Could not remove file %s' % full)

    os.rmdir(path)
156
157
158
def create_filename(datadir, filename, format, size=None, time=None):
    """Build a data file path of the form
    <datadir>/<filename>[_size<size>][_time<time>].<format>

    datadir is verified (and created if needed) via check_dir.
    """

    import os

    name = check_dir(datadir) + filename

    if size is not None:
        name += '_size%d' % size

    if time is not None:
        name += '_time%.2f' % time

    return name + '.' + format
174
175
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format.

    Returns the list of paths in datadir matching the naming scheme
    used by create_filename, i.e. <filename>_size<size>*.<format>
    """

    import glob
    import os

    directory = check_dir(datadir)  # already ends with os.sep

    # BUG FIX: the old pattern was '_size=%d' with an extra os.sep
    # prepended; create_filename writes '_size%d' (no '=') and
    # check_dir already supplies the trailing separator, so files
    # produced by create_filename could never match.
    pattern = directory + filename + '_size%d*.%s' % (size, format)
    return glob.glob(pattern)
189
190
191
#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        """Bind *domain* to a data file named after the domain with the
        given *extension*.

        mode must be 'w' (write), 'r' (read) or 'a' (append).
        """
        msg = ("Mode %s must be either:"
               "   'w' (write)"
               "   'r' (read)"
               "   'a' (append)") % mode
        assert mode in ('r', 'w', 'a'), msg

        # File name derived from the domain's data directory and name
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain

        #FIXME: Should we have a general set_precision function?
221
#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.

    When the file grows past max_size, store_timestep rolls over to a
    new Data_format_sww with an incremented recursion level and a
    time-stamped filename.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        """Create (mode 'w') or open an sww file for *domain*.

        max_size  -- file size in bytes at which output rolls over to a
                     new file (~2GB default); domain.max_size, if set,
                     takes precedence
        recursion -- False for the first file; rollover files carry the
                     integer recursion depth (see store_timestep)
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        # Sets self.filename, self.number_of_volumes, self.domain
        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            # With smoothing, shared vertices collapse to unique points;
            # otherwise each triangle stores its own three vertices
            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility ('z' duplicates 'elevation')
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.  Also writes the triangle-to-point index
        array ('volumes').  Called once per output file.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)


        # Cast down to storage precision (Float32) before writing
        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility - duplicate elevation into 'z'
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file

        names -- a quantity name or list/tuple of names; recognised
                 names are 'stage', 'xmomentum' and 'ymomentum'.

        If appending would push the file past max_size, a new
        Data_format_sww (with recursion level + 1 and a time-stamped
        filename) is created, installed as domain.writer and given the
        data instead.

        Raises DataFileNotOpenError if the file cannot be opened for
        append after 10 retries.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        #minimum_allowed_depth = 0.001
        minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF; retry for up to 10s in case a reader holds the file
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        # Estimate the size one more timestep will add (average so far)
        file_size_increase =  file_size/i
        # NOTE: 2**self.recursion relies on False == 0 for the first file
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domian.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break swollens reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            # Normal path: append one timestep to this file
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    # Clip stage to the bed where depth is below the
                    # minimum allowed depth
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()
493
494
495
#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt) for checkpoint data

    Stores the mesh once (store_checkpoint) and, per timestep, the
    current time plus one named quantity (store_timestep).
    """


    def __init__(self, domain, mode = 'w'):
        """Create (mode 'w') or open the checkpoint file for *domain*.

        In write mode the NetCDF structure is defined: mesh variables
        plus one (timesteps x points) variable per domain quantity.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float  # was 'Int, Float, Float' (duplicate)

        self.precision = Float #Use full precision for checkpoints

        # NOTE(review): the extension passed here is 'sww', not 'cpt',
        # so checkpoint files share their name with model output files.
        # Looks wrong, but left unchanged as callers may rely on the
        # current file name - confirm before changing.
        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write the mesh to file: x, y vertex coordinates and the
        triangle connectivity ('volumes').
        """

        from Scientific.IO.NetCDF import NetCDFFile

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)


        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)

        # BUG FIX: the old code did "z[:] = Z.astype(...)" but 'z' was
        # never defined (guaranteed NameError) and the cpt structure
        # defines no 1-D elevation variable, so Z is not stored here.

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store current domain time and the named quantity to file.

        Retries opening the file for up to 10 seconds; raises
        DataFileNotOpenError if it still cannot be opened.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF; retry in case a reader has the file open
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying again'
                print(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            # BUG FIX: was misspelled 'DataFileNotOPenError', which made
            # this raise a NameError instead of the intended exception
            raise DataFileNotOpenError(msg)


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        # NOTE(review): values are written into 'stage' regardless of
        # which quantity *name* was requested - looks like it should be
        # fid.variables[name]; confirm against callers before changing.
        stage = fid.variables['stage']
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        stage[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()
641
642
643
644
645
#Function for storing xya output
#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface to ASCII point format (.xya) for storing vertex data:
    x, y, bed elevation, stage and friction per vertex.
    """


    def __init__(self, domain, mode = 'w'):
        """Bind *domain* to an .xya output file (single precision)."""
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.

        One output line per vertex: x, y, bed elevation, stage and
        friction, preceded by a header with the point count and the
        number of attributes (fixed at 3 by the xya format).
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        # Smoothing collapses shared vertices to unique points
        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        # Header: <point count> <attribute count> followed by a comment
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        # map(None, ...) is the old Python 2 idiom for zipping sequences,
        # padding with None if lengths differ
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file

        Not implemented for the xya format (intentional no-op).
        """
        pass
717
718
#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number elements.
       triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

       The '.obj' extension is appended unless filename already has it.
    """

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'

    # BUG FIX: obj is a plain-text format; the file was opened in
    # binary mode ('wb'), which adds nothing on Python 2 and breaks
    # string writes on Python 3.  Open in text mode instead.
    outfile = open(FN, 'w')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    # One 'v' line per vertex of each (disconnected) triangle
    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    # One 'f' line per triangle, referencing its vertices (obj is 1-based)
    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()
757
758
759#########################################################
760#Conversion routines
761########################################################
762
def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj

    Writes one obj file for the bathymetry and one per timestep with
    stage as the vertical coordinate.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    # Open the sww file produced for this basefilename/size
    FN = create_filename('.', basefilename, 'sww', size)
    print('Reading from  %s' % FN)
    fid = NetCDFFile(FN, 'r')  #Open existing file for read

    # Mesh coordinates, bed elevation and the time series
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines (triangles)
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    # The flat point arrays hold vertex j of triangle i at index i + j*M
    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    # Bathymetry as a single obj file
    obj_name = create_filename('.', basefilename, 'obj', size)
    write_obj(obj_name, xx, yy, zz)

    # One obj file per timestep, using stage for the z values
    for k in range(len(time)):
        t = time[k]
        print('Processing timestep %f' % t)

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k, i+j*M]

        #FN = create_filename(basefilename, 'obj', size, time=t)
        step_name = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(step_name, xx, yy, zz)
815
816
def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?

    Reads <basefilename>_geometry.dat from data_dir for the mesh, then
    converts every <basefilename>*_time=T.dat file into an obj file
    with the variable data as z values.

    Raises DataTimeError if a time value cannot be parsed from a
    variable-data filename.
    """

    import glob, os
    from config import data_dir

    from Numeric import zeros, Float

    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    M = len(lines)  #Number of lines (triangles)
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    # Each geometry line holds three (x, y, z) triples, one per vertex
    for i, line in enumerate(lines):
        values = [float(token) for token in line.split()]

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print('Processing %s' % filename)

        # BUG FIX: glob already returns paths prefixed with data_dir;
        # the old code prepended data_dir a second time and therefore
        # opened a non-existent path.
        lines = open(filename,'r').readlines()
        assert len(lines) == M

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError('Hmmmm')


        # Variable data: one value per vertex per line
        for i, line in enumerate(lines):
            values = [float(token) for token in line.split()]

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
894
895
def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    # Source (read) and destination (new file)
    infile = NetCDFFile(filename1, 'r')
    outfile = NetCDFFile(filename2, 'w')

    # Mirror all dimensions and variable definitions
    for name in infile.dimensions:
        outfile.createDimension(name, infile.dimensions[name])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)

    # Copy every static variable verbatim; 'time' and 'stage' are
    # subsampled below instead
    for name in infile.variables:
        if name not in ('time', 'stage'):
            outfile.variables[name][:] = infile.variables[name][:]

    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    # Copy the requested subset of timesteps
    for i, j in enumerate(range(first, last, step)):
        print('Copying timestep %d of %d (%f)' % (j, last-first, time[j]))
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
943
944
#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename

    domain.format selects the writer class Data_format_<format>
    (e.g. 'sww' -> Data_format_sww), which is instantiated with
    (domain, mode).
    """

    # Look the class up by name in this module instead of eval'ing
    # arbitrary text from domain.format (safer, same result for all
    # defined formats)
    cls = globals()['Data_format_%s' % domain.format]
    return cls(domain, mode)
952
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed;
    every stride-th line is read (stride=1 reads them all).

    If z_func is specified, it will be applied to the third field
    """

    import os

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Open existing xya file for read
    infile = open(root + ext, 'r')

    if verbose: print('Reading xya points from %s' % (root + ext))

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        # Warn about malformed lines; they are then skipped by the
        # float conversion below
        if len(fields) != 3:
            print('WARNING: Line %d doesn\'t have 3 elements: %s' % (i, line))

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except (ValueError, IndexError):
            # Non-numeric or short line (e.g. the header) - skip it
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)

    # BUG FIX: the input file handle was previously never closed
    infile.close()

    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print('Store to NetCDF file %s' % ptsname)
    write_ptsfile(ptsname, points, attribute, attribute_name)
1028
1029
1030
1031######Obsoleted by export_points in load_mesh
1032def write_ptsfile(ptsname, points, attribute, attribute_name = None,
1033                  zone=None, xllcorner=None, yllcorner=None):
1034    """Write points and associated attribute to pts (NetCDF) format
1035    """
1036
1037    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'
1038
1039    from Numeric import Float
1040
1041    if attribute_name is None:
1042        attribute_name = 'attribute'
1043
1044
1045    from Scientific.IO.NetCDF import NetCDFFile
1046
1047    # NetCDF file definition
1048    outfile = NetCDFFile(ptsname, 'w')
1049
1050
1051    #Create new file
1052    outfile.institution = 'Geoscience Australia'
1053    outfile.description = 'NetCDF pts format for compact and '\
1054                          'portable storage of spatial point data'
1055
1056
1057    #Georeferencing
1058    from coordinate_transforms.geo_reference import Geo_reference
1059    if zone is None:
1060        assert xllcorner is None, 'xllcorner must be None'
1061        assert yllcorner is None, 'yllcorner must be None'
1062        Geo_reference().write_NetCDF(outfile)
1063    else:
1064        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)
1065
1066
1067
1068    outfile.createDimension('number_of_points', len(points))
1069    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1070
1071    # variable definitions
1072    outfile.createVariable('points', Float, ('number_of_points',
1073                                             'number_of_dimensions'))
1074    outfile.createVariable(attribute_name, Float, ('number_of_points',))
1075
1076    # Get handles to the variables
1077    nc_points = outfile.variables['points']
1078    nc_attribute = outfile.variables[attribute_name]
1079
1080    #Store data
1081    nc_points[:, :] = points
1082    nc_attribute[:] = attribute
1083
1084    outfile.close()
1085
1086
def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False,):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    The easting/northing arguments optionally clip the DEM before
    conversion. If use_cache is True the result is memoized on disk
    keyed on the input .dem file.
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        #Call directly - the apply() builtin is deprecated
        result = _dem2pts(basename_in, **kwargs)

    return result
1128
1129
def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.

    Reads basename_in + '.dem', optionally clips to the rectangle
    [easting_min, easting_max] x [northing_min, northing_max] (absolute
    coordinates), drops NODATA cells, and writes surviving grid points
    plus elevations to a NetCDF .pts file. Output point coordinates are
    stored relative to the lower left corner of the clipped region,
    which becomes the new georeference.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    #Grid metadata (stored as length-1 arrays in the NetCDF file)
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values: no clipping means the full DEM extent
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing: output file is referenced to the clipped corner
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0     #NODATA cells inside the clip rectangle
    k = 0      #Cells visited inside the clip rectangle
    i1_0 = 0   #Row index of first cell inside clip rectangle
    j1_0 = 0   #Column index of first cell inside clip rectangle
    thisj = 0  #Column index of last cell inside clip rectangle
    thisi = 0  #Row index of last cell inside clip rectangle
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner  #Row 0 is the northernmost
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    #First and last columns of the clipped region
    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn  #Points actually written (NODATA excluded)

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose and nn > 0:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1  #Number of columns per clipped row
    #Store data, one clipped row at a time
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        #Count NODATA cells in this row to size the row buffers exactly
        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] <> NODATA_value:
                #Coordinates stored relative to the new lower left corner
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        #NOTE(review): rows whose northing falls outside the clip band
        #yield fewer than newcols points and are silently dropped here,
        #which would then trip the assert below - confirm intended
        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()
1309
1310
1311
1312def _read_hecras_cross_sections(lines):
1313    """Return block of surface lines for each cross section
1314    Starts with SURFACE LINE,
1315    Ends with END CROSS-SECTION
1316    """
1317
1318    points = []
1319
1320    reading_surface = False
1321    for i, line in enumerate(lines):
1322
1323        if len(line.strip()) == 0:    #Ignore blanks
1324            continue
1325
1326        if lines[i].strip().startswith('SURFACE LINE'):
1327            reading_surface = True
1328            continue
1329
1330        if lines[i].strip().startswith('END') and reading_surface:
1331            yield points
1332            reading_surface = False
1333            points = []
1334
1335        if reading_surface:
1336            fields = line.strip().split(',')
1337            easting = float(fields[0])
1338            northing = float(fields[1])
1339            elevation = float(fields[2])
1340            points.append([easting, northing, elevation])
1341
1342
1343
1344
1345def hecras_cross_sections2pts(basename_in,
1346                              basename_out=None,
1347                              verbose=False):
1348    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1349
1350    Example:
1351
1352
1353# RAS export file created on Mon 15Aug2005 11:42
1354# by HEC-RAS Version 3.1.1
1355
1356BEGIN HEADER:
1357  UNITS: METRIC
1358  DTM TYPE: TIN
1359  DTM: v:\1\cit\perth_topo\river_tin
1360  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1361  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1362  MAP PROJECTION: UTM
1363  PROJECTION ZONE: 50
1364  DATUM: AGD66
1365  VERTICAL DATUM:
1366  NUMBER OF REACHES:  19
1367  NUMBER OF CROSS-SECTIONS:  14206
1368END HEADER:
1369
1370
1371Only the SURFACE LINE data of the following form will be utilised
1372
1373  CROSS-SECTION:
1374    STREAM ID:Southern-Wungong
1375    REACH ID:Southern-Wungong
1376    STATION:19040.*
1377    CUT LINE:
1378      405548.671603161 , 6438142.7594925
1379      405734.536092045 , 6438326.10404912
1380      405745.130459356 , 6438331.48627354
1381      405813.89633823 , 6438368.6272789
1382    SURFACE LINE:
1383     405548.67,   6438142.76,   35.37
1384     405552.24,   6438146.28,   35.41
1385     405554.78,   6438148.78,   35.44
1386     405555.80,   6438149.79,   35.44
1387     405559.37,   6438153.31,   35.45
1388     405560.88,   6438154.81,   35.44
1389     405562.93,   6438156.83,   35.42
1390     405566.50,   6438160.35,   35.38
1391     405566.99,   6438160.83,   35.37
1392     ...
1393   END CROSS-SECTION
1394
1395    Convert to NetCDF pts format which is
1396
1397    points:  (Nx2) Float array
1398    elevation: N Float array
1399    """
1400
1401    #FIXME: Can this be written feasibly using write_pts?
1402
1403    import os
1404    from Scientific.IO.NetCDF import NetCDFFile
1405    from Numeric import Float, zeros, reshape
1406
1407    root = basename_in
1408
1409    #Get ASCII file
1410    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1411
1412    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1413
1414    lines = infile.readlines()
1415    infile.close()
1416
1417    if verbose: print 'Converting to pts format'
1418
1419    i = 0
1420    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1421        i += 1
1422
1423    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1424    i += 1
1425
1426    assert lines[i].strip().upper().startswith('UNITS:')
1427    units = lines[i].strip().split()[1]
1428    i += 1
1429
1430    assert lines[i].strip().upper().startswith('DTM TYPE:')
1431    i += 1
1432
1433    assert lines[i].strip().upper().startswith('DTM:')
1434    i += 1
1435
1436    assert lines[i].strip().upper().startswith('STREAM')
1437    i += 1
1438
1439    assert lines[i].strip().upper().startswith('CROSS')
1440    i += 1
1441
1442    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1443    projection = lines[i].strip().split(':')[1]
1444    i += 1
1445
1446    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1447    zone = int(lines[i].strip().split(':')[1])
1448    i += 1
1449
1450    assert lines[i].strip().upper().startswith('DATUM:')
1451    datum = lines[i].strip().split(':')[1]
1452    i += 1
1453
1454    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1455    i += 1
1456
1457    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1458    i += 1
1459
1460    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1461    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1462    i += 1
1463
1464
1465    #Now read all points
1466    points = []
1467    elevation = []
1468    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1469        for k, entry in enumerate(entries):
1470            points.append(entry[:2])
1471            elevation.append(entry[2])
1472
1473
1474    msg = 'Actual #number_of_cross_sections == %d, Reported as %d'\
1475          %(j+1, number_of_cross_sections)
1476    assert j+1 == number_of_cross_sections, msg
1477
1478    #Get output file
1479    if basename_out == None:
1480        ptsname = root + '.pts'
1481    else:
1482        ptsname = basename_out + '.pts'
1483
1484    #FIXME (DSG-ON): use loadASCII export_points_file
1485    if verbose: print 'Store to NetCDF file %s' %ptsname
1486    # NetCDF file definition
1487    outfile = NetCDFFile(ptsname, 'w')
1488
1489    #Create new file
1490    outfile.institution = 'Geoscience Australia'
1491    outfile.description = 'NetCDF pts format for compact and portable ' +\
1492                          'storage of spatial point data derived from HEC-RAS'
1493
1494    #Georeferencing
1495    outfile.zone = zone
1496    outfile.xllcorner = 0.0
1497    outfile.yllcorner = 0.0
1498    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1499    outfile.false_northing = 1000000
1500
1501    outfile.projection = projection
1502    outfile.datum = datum
1503    outfile.units = units
1504
1505
1506    outfile.createDimension('number_of_points', len(points))
1507    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1508
1509    # variable definitions
1510    outfile.createVariable('points', Float, ('number_of_points',
1511                                             'number_of_dimensions'))
1512    outfile.createVariable('elevation', Float, ('number_of_points',))
1513
1514    # Get handles to the variables
1515    outfile.variables['points'][:] = points
1516    outfile.variables['elevation'][:] = elevation
1517
1518    outfile.close()
1519
1520
1521
1522def sww2dem(basename_in, basename_out = None,
1523            quantity = None,
1524            timestep = None,
1525            reduction = None,
1526            cellsize = 10,
1527            NODATA_value = -9999,
1528            easting_min = None,
1529            easting_max = None,
1530            northing_min = None,
1531            northing_max = None,
1532            verbose = False,
1533            origin = None,
1534            datum = 'WGS84',
1535        format = 'ers'):
1536
1537    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1538
1539    Example (ASC):
1540
1541    ncols         3121
1542    nrows         1800
1543    xllcorner     722000
1544    yllcorner     5893000
1545    cellsize      25
1546    NODATA_value  -9999
1547    138.3698 137.4194 136.5062 135.5558 ..........
1548
1549    Also write accompanying file with same basename_in but extension .prj
1550    used to fix the UTM zone, datum, false northings and eastings.
1551
1552    The prj format is assumed to be as
1553
1554    Projection    UTM
1555    Zone          56
1556    Datum         WGS84
1557    Zunits        NO
1558    Units         METERS
1559    Spheroid      WGS84
1560    Xshift        0.0000000000
1561    Yshift        10000000.0000000000
1562    Parameters
1563
1564
1565    The parameter quantity must be the name of an existing quantity or
1566    an expression involving existing quantities. The default is
1567    'elevation'.
1568
1569    if timestep (an index) is given, output quantity at that timestep
1570
1571    if reduction is given use that to reduce quantity over all timesteps.
1572
1573    datum
1574
1575    format can be either 'asc' or 'ers'
1576    """
1577
1578    import sys
1579    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1580    from Numeric import array2string
1581
1582    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1583    from util import apply_expression_to_dictionary
1584
1585    msg = 'Format must be either asc or ers'
1586    assert format.lower() in ['asc', 'ers'], msg
1587
1588
1589    false_easting = 500000
1590    false_northing = 10000000
1591
1592    if quantity is None:
1593        quantity = 'elevation'
1594
1595    if reduction is None:
1596        reduction = max
1597
1598    if basename_out is None:
1599        basename_out = basename_in + '_%s' %quantity
1600
1601    swwfile = basename_in + '.sww'
1602    demfile = basename_out + '.' + format
1603    # Note the use of a .ers extension is optional (write_ermapper_grid will
1604    # deal with either option
1605
1606    #Read sww file
1607    if verbose: print 'Reading from %s' %swwfile
1608    from Scientific.IO.NetCDF import NetCDFFile
1609    fid = NetCDFFile(swwfile)
1610
1611    #Get extent and reference
1612    x = fid.variables['x'][:]
1613    y = fid.variables['y'][:]
1614    volumes = fid.variables['volumes'][:]
1615
1616    number_of_timesteps = fid.dimensions['number_of_timesteps']
1617    number_of_points = fid.dimensions['number_of_points']
1618    if origin is None:
1619
1620        #Get geo_reference
1621        #sww files don't have to have a geo_ref
1622        try:
1623            geo_reference = Geo_reference(NetCDFObject=fid)
1624        except AttributeError, e:
1625            geo_reference = Geo_reference() #Default georef object
1626
1627        xllcorner = geo_reference.get_xllcorner()
1628        yllcorner = geo_reference.get_yllcorner()
1629        zone = geo_reference.get_zone()
1630    else:
1631        zone = origin[0]
1632        xllcorner = origin[1]
1633        yllcorner = origin[2]
1634
1635
1636
1637    #FIXME: Refactor using code from file_function.statistics
1638    #Something like print swwstats(swwname)
1639    if verbose:
1640        x = fid.variables['x'][:]
1641        y = fid.variables['y'][:]
1642        times = fid.variables['time'][:]
1643        print '------------------------------------------------'
1644        print 'Statistics of SWW file:'
1645        print '  Name: %s' %swwfile
1646        print '  Reference:'
1647        print '    Lower left corner: [%f, %f]'\
1648              %(xllcorner, yllcorner)
1649        print '    Start time: %f' %fid.starttime[0]
1650        print '  Extent:'
1651        print '    x [m] in [%f, %f], len(x) == %d'\
1652              %(min(x.flat), max(x.flat), len(x.flat))
1653        print '    y [m] in [%f, %f], len(y) == %d'\
1654              %(min(y.flat), max(y.flat), len(y.flat))
1655        print '    t [s] in [%f, %f], len(t) == %d'\
1656              %(min(times), max(times), len(times))
1657        print '  Quantities [SI units]:'
1658        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
1659            q = fid.variables[name][:].flat
1660            print '    %s in [%f, %f]' %(name, min(q), max(q))
1661
1662
1663
1664
1665
1666    # Get quantity and reduce if applicable
1667    if verbose: print 'Processing quantity %s' %quantity
1668
1669    # Turn NetCDF objects into Numeric arrays
1670    quantity_dict = {}
1671    for name in fid.variables.keys():
1672        quantity_dict[name] = fid.variables[name][:]
1673
1674
1675
1676    # Convert quantity expression to quantities found in sww file   
1677    q = apply_expression_to_dictionary(quantity, quantity_dict)
1678
1679
1680
1681    if len(q.shape) == 2:
1682        #q has a time component and needs to be reduced along
1683        #the temporal dimension
1684        if verbose: print 'Reducing quantity %s' %quantity
1685        q_reduced = zeros( number_of_points, Float )
1686
1687        for k in range(number_of_points):
1688            q_reduced[k] = reduction( q[:,k] )
1689
1690        q = q_reduced
1691
1692    #Post condition: Now q has dimension: number_of_points
1693    assert len(q.shape) == 1
1694    assert q.shape[0] == number_of_points
1695
1696
1697    if verbose:
1698        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1699
1700
1701    #Create grid and update xll/yll corner and x,y
1702
1703    #Relative extent
1704    if easting_min is None:
1705        xmin = min(x)
1706    else:
1707        xmin = easting_min - xllcorner
1708
1709    if easting_max is None:
1710        xmax = max(x)
1711    else:
1712        xmax = easting_max - xllcorner
1713
1714    if northing_min is None:
1715        ymin = min(y)
1716    else:
1717        ymin = northing_min - yllcorner
1718
1719    if northing_max is None:
1720        ymax = max(y)
1721    else:
1722        ymax = northing_max - yllcorner
1723
1724
1725
1726    if verbose: print 'Creating grid'
1727    ncols = int((xmax-xmin)/cellsize)+1
1728    nrows = int((ymax-ymin)/cellsize)+1
1729
1730
1731    #New absolute reference and coordinates
1732    newxllcorner = xmin+xllcorner
1733    newyllcorner = ymin+yllcorner
1734
1735    x = x+xllcorner-newxllcorner
1736    y = y+yllcorner-newyllcorner
1737
1738    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1739    assert len(vertex_points.shape) == 2
1740
1741
1742
1743    grid_points = zeros ( (ncols*nrows, 2), Float )
1744
1745
1746    for i in xrange(nrows):
1747        if format.lower() == 'asc':
1748            yg = i*cellsize
1749        else:
1750        #this will flip the order of the y values for ers
1751            yg = (nrows-i)*cellsize
1752
1753        for j in xrange(ncols):
1754            xg = j*cellsize
1755            k = i*ncols + j
1756
1757            grid_points[k,0] = xg
1758            grid_points[k,1] = yg
1759
1760    #Interpolate
1761    #from least_squares import Interpolation
1762    from fit_interpolate.interpolate import Interpolate
1763
1764
1765    interp = Interpolate(vertex_points, volumes, verbose = verbose)
1766
1767
1768
1769    #Interpolate using quantity values
1770    if verbose: print 'Interpolating'
1771    grid_values = interp.interpolate(q, grid_points).flat
1772
1773
1774    if verbose:
1775        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
1776                                                      max(grid_values))
1777
1778    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
1779    P = interp.mesh.get_boundary_polygon()
1780    outside_indices = outside_polygon(grid_points, P, closed=True)
1781
1782    for i in outside_indices:
1783        grid_values[i] = NODATA_value
1784
1785
1786
1787
1788    if format.lower() == 'ers':
1789        # setup ERS header information
1790        grid_values = reshape(grid_values,(nrows, ncols))
1791        header = {}
1792        header['datum'] = '"' + datum + '"'
1793        # FIXME The use of hardwired UTM and zone number needs to be made optional
1794        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
1795        header['projection'] = '"UTM-' + str(zone) + '"'
1796        header['coordinatetype'] = 'EN'
1797        if header['coordinatetype'] == 'LL':
1798            header['longitude'] = str(newxllcorner)
1799            header['latitude'] = str(newyllcorner)
1800        elif header['coordinatetype'] == 'EN':
1801            header['eastings'] = str(newxllcorner)
1802            header['northings'] = str(newyllcorner)
1803        header['nullcellvalue'] = str(NODATA_value)
1804        header['xdimension'] = str(cellsize)
1805        header['ydimension'] = str(cellsize)
1806        header['value'] = '"' + quantity + '"'
1807        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
1808
1809
1810        #Write
1811        if verbose: print 'Writing %s' %demfile
1812        import ermapper_grids
1813        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
1814
1815        fid.close()
1816    else:
1817        #Write to Ascii format
1818
1819        #Write prj file
1820        prjfile = basename_out + '.prj'
1821
1822        if verbose: print 'Writing %s' %prjfile
1823        prjid = open(prjfile, 'w')
1824        prjid.write('Projection    %s\n' %'UTM')
1825        prjid.write('Zone          %d\n' %zone)
1826        prjid.write('Datum         %s\n' %datum)
1827        prjid.write('Zunits        NO\n')
1828        prjid.write('Units         METERS\n')
1829        prjid.write('Spheroid      %s\n' %datum)
1830        prjid.write('Xshift        %d\n' %false_easting)
1831        prjid.write('Yshift        %d\n' %false_northing)
1832        prjid.write('Parameters\n')
1833        prjid.close()
1834
1835
1836
1837        if verbose: print 'Writing %s' %demfile
1838
1839        ascid = open(demfile, 'w')
1840
1841        ascid.write('ncols         %d\n' %ncols)
1842        ascid.write('nrows         %d\n' %nrows)
1843        ascid.write('xllcorner     %d\n' %newxllcorner)
1844        ascid.write('yllcorner     %d\n' %newyllcorner)
1845        ascid.write('cellsize      %f\n' %cellsize)
1846        ascid.write('NODATA_value  %d\n' %NODATA_value)
1847
1848
1849        #Get bounding polygon from mesh
1850        #P = interp.mesh.get_boundary_polygon()
1851        #inside_indices = inside_polygon(grid_points, P)
1852
1853        for i in range(nrows):
1854            if verbose and i%((nrows+10)/10)==0:
1855                print 'Doing row %d of %d' %(i, nrows)
1856
1857            base_index = (nrows-i-1)*ncols
1858
1859            slice = grid_values[base_index:base_index+ncols]
1860            s = array2string(slice, max_line_width=sys.maxint)
1861            ascid.write(s[1:-1] + '\n')
1862
1863
1864            #print
1865            #for j in range(ncols):
1866            #    index = base_index+j#
1867            #    print grid_values[index],
1868            #    ascid.write('%f ' %grid_values[index])
1869            #ascid.write('\n')
1870
1871        #Close
1872        ascid.close()
1873        fid.close()
1874
1875#Backwards compatibility
1876def sww2asc(basename_in, basename_out = None,
1877            quantity = None,
1878            timestep = None,
1879            reduction = None,
1880            cellsize = 10,
1881            verbose = False,
1882            origin = None):
1883    print 'sww2asc will soon be obsoleted - please use sww2dem'
1884    sww2dem(basename_in,
1885            basename_out = basename_out,
1886            quantity = quantity,
1887            timestep = timestep,
1888            reduction = reduction,
1889            cellsize = cellsize,
1890            verbose = verbose,
1891            origin = origin,
1892        datum = 'WGS84',
1893        format = 'asc')
1894
1895def sww2ers(basename_in, basename_out = None,
1896            quantity = None,
1897            timestep = None,
1898            reduction = None,
1899            cellsize = 10,
1900            verbose = False,
1901            origin = None,
1902            datum = 'WGS84'):
1903    print 'sww2ers will soon be obsoleted - please use sww2dem'
1904    sww2dem(basename_in,
1905            basename_out = basename_out,
1906            quantity = quantity,
1907            timestep = timestep,
1908            reduction = reduction,
1909            cellsize = cellsize,
1910            verbose = verbose,
1911            origin = origin,
1912            datum = datum,
1913            format = 'ers')
1914################################# END COMPATIBILITY ##############
1915
1916
1917
def sww2pts(basename_in, basename_out=None,
            data_points=None,
            quantity=None,
            timestep=None,
            reduction=None,
            NODATA_value=-9999,
            verbose=False,
            origin=None):
            #datum = 'WGS84')


    """Read SWW file and convert to interpolated values at selected points.

    Parameters:
        basename_in: root name of the .sww file to read.
        basename_out: root name of the output file. Defaults to
            basename_in + '_<quantity>'. The extension .pts is appended.
        data_points: Nx2 array giving locations of points where the
            quantity is to be computed.
        quantity: name of an existing quantity or an expression involving
            existing quantities. The default is 'elevation'.
        timestep: if given (an index), output quantity at that timestep.
            NOTE(review): timestep is currently not used anywhere in this
            function body - confirm whether reduction always applies.
        reduction: callable used to reduce the quantity over all
            timesteps (default: max).
        NODATA_value: value assigned to points falling outside the
            boundary polygon of the interpolation mesh.
        verbose: if True, print progress information and statistics.
        origin: optional 3-tuple (zone, easting, northing) overriding the
            geo reference stored in the sww file.

    Side effect: writes the interpolated values to a .pts file using
    Geospatial_data with absolute coordinates.
    """

    import sys
    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
    from Numeric import array2string

    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
    from util import apply_expression_to_dictionary

    from geospatial_data import Geospatial_data

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    ptsfile = basename_out + '.pts'

    # Read sww file
    if verbose: print 'Reading from %s' %swwfile
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    # Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        # Get geo_reference
        # sww files don't have to have a geo_ref
        # NOTE(review): Geo_reference is presumably imported at module
        # level - confirm.
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        # Caller supplied an explicit reference; use it unconditionally
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]



    # FIXME: Refactor using code from file_function.statistics
    # Something like print swwstats(swwname)
    if verbose:
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        times = fid.variables['time'][:]
        print '------------------------------------------------'
        print 'Statistics of SWW file:'
        print '  Name: %s' %swwfile
        print '  Reference:'
        print '    Lower left corner: [%f, %f]'\
              %(xllcorner, yllcorner)
        print '    Start time: %f' %fid.starttime[0]
        print '  Extent:'
        print '    x [m] in [%f, %f], len(x) == %d'\
              %(min(x.flat), max(x.flat), len(x.flat))
        print '    y [m] in [%f, %f], len(y) == %d'\
              %(min(y.flat), max(y.flat), len(y.flat))
        print '    t [s] in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))
        print '  Quantities [SI units]:'
        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
            q = fid.variables[name][:].flat
            print '    %s in [%f, %f]' %(name, min(q), max(q))



    # Get quantity and reduce if applicable
    if verbose: print 'Processing quantity %s' %quantity

    # Turn NetCDF objects into Numeric arrays
    quantity_dict = {}
    for name in fid.variables.keys():
        quantity_dict[name] = fid.variables[name][:]



    # Convert quantity expression to quantities found in sww file   
    q = apply_expression_to_dictionary(quantity, quantity_dict)



    if len(q.shape) == 2:
        # q has a time component and needs to be reduced along
        # the temporal dimension
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        # Apply the reduction callable (e.g. max) per point over time
        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    # Post condition: Now q has dimension: number_of_points
    assert len(q.shape) == 1
    assert q.shape[0] == number_of_points


    if verbose:
        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))


    # Create grid and update xll/yll corner and x,y
    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2

    # Interpolate
    from fit_interpolate.interpolate import Interpolate
    interp = Interpolate(vertex_points, volumes, verbose = verbose)

    # Interpolate using quantity values
    if verbose: print 'Interpolating'
    interpolated_values = interp.interpolate(q, data_points).flat


    if verbose:
        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
                                                      max(interpolated_values))

    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
    P = interp.mesh.get_boundary_polygon()
    outside_indices = outside_polygon(data_points, P, closed=True)

    for i in outside_indices:
        interpolated_values[i] = NODATA_value


    # Store results   
    G = Geospatial_data(data_points=data_points,
                        attributes=interpolated_values)

    G.export_points_file(ptsfile, absolute = True)

    fid.close()
2090
def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
                                  use_cache = False,
                                  verbose = False):
    """Read Digitial Elevation model from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert basename_in + '.asc' to NetCDF format (.dem)
    mimicking the ASCII format closely.


    An accompanying file with same basename_in but extension .prj must exist
    and is used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters

    If use_cache is True the (potentially expensive) conversion is
    memoised via the caching module, keyed on the .asc file.
    Returns whatever _convert_dem_from_ascii2netcdf returns.
    """

    kwargs = {'basename_out': basename_out, 'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
                       dependencies = [basename_in + '.asc'],
                       verbose = verbose)

    else:
        # Call directly; the builtin apply() is deprecated in favour of
        # the extended call syntax f(*args, **kwargs).
        result = _convert_dem_from_ascii2netcdf(basename_in, **kwargs)

    return result
2140
2141
2142
2143
2144
2145
2146def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2147                                  verbose = False):
2148    """Read Digitial Elevation model from the following ASCII format (.asc)
2149
2150    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2151    """
2152
2153    import os
2154    from Scientific.IO.NetCDF import NetCDFFile
2155    from Numeric import Float, array
2156
2157    #root, ext = os.path.splitext(basename_in)
2158    root = basename_in
2159
2160    ###########################################
2161    # Read Meta data
2162    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2163    metadatafile = open(root + '.prj')
2164    metalines = metadatafile.readlines()
2165    metadatafile.close()
2166
2167    L = metalines[0].strip().split()
2168    assert L[0].strip().lower() == 'projection'
2169    projection = L[1].strip()                   #TEXT
2170
2171    L = metalines[1].strip().split()
2172    assert L[0].strip().lower() == 'zone'
2173    zone = int(L[1].strip())
2174
2175    L = metalines[2].strip().split()
2176    assert L[0].strip().lower() == 'datum'
2177    datum = L[1].strip()                        #TEXT
2178
2179    L = metalines[3].strip().split()
2180    assert L[0].strip().lower() == 'zunits'     #IGNORE
2181    zunits = L[1].strip()                       #TEXT
2182
2183    L = metalines[4].strip().split()
2184    assert L[0].strip().lower() == 'units'
2185    units = L[1].strip()                        #TEXT
2186
2187    L = metalines[5].strip().split()
2188    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2189    spheroid = L[1].strip()                     #TEXT
2190
2191    L = metalines[6].strip().split()
2192    assert L[0].strip().lower() == 'xshift'
2193    false_easting = float(L[1].strip())
2194
2195    L = metalines[7].strip().split()
2196    assert L[0].strip().lower() == 'yshift'
2197    false_northing = float(L[1].strip())
2198
2199    #print false_easting, false_northing, zone, datum
2200
2201
2202    ###########################################
2203    #Read DEM data
2204
2205    datafile = open(basename_in + '.asc')
2206
2207    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2208    lines = datafile.readlines()
2209    datafile.close()
2210
2211    if verbose: print 'Got', len(lines), ' lines'
2212
2213    ncols = int(lines[0].split()[1].strip())
2214    nrows = int(lines[1].split()[1].strip())
2215    xllcorner = float(lines[2].split()[1].strip())
2216    yllcorner = float(lines[3].split()[1].strip())
2217    cellsize = float(lines[4].split()[1].strip())
2218    NODATA_value = int(lines[5].split()[1].strip())
2219
2220    assert len(lines) == nrows + 6
2221
2222
2223    ##########################################
2224
2225
2226    if basename_out == None:
2227        netcdfname = root + '.dem'
2228    else:
2229        netcdfname = basename_out + '.dem'
2230
2231    if verbose: print 'Store to NetCDF file %s' %netcdfname
2232    # NetCDF file definition
2233    fid = NetCDFFile(netcdfname, 'w')
2234
2235    #Create new file
2236    fid.institution = 'Geoscience Australia'
2237    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2238                      'of spatial point data'
2239
2240    fid.ncols = ncols
2241    fid.nrows = nrows
2242    fid.xllcorner = xllcorner
2243    fid.yllcorner = yllcorner
2244    fid.cellsize = cellsize
2245    fid.NODATA_value = NODATA_value
2246
2247    fid.zone = zone
2248    fid.false_easting = false_easting
2249    fid.false_northing = false_northing
2250    fid.projection = projection
2251    fid.datum = datum
2252    fid.units = units
2253
2254
2255    # dimension definitions
2256    fid.createDimension('number_of_rows', nrows)
2257    fid.createDimension('number_of_columns', ncols)
2258
2259    # variable definitions
2260    fid.createVariable('elevation', Float, ('number_of_rows',
2261                                            'number_of_columns'))
2262
2263    # Get handles to the variables
2264    elevation = fid.variables['elevation']
2265
2266    #Store data
2267    n = len(lines[6:])
2268    for i, line in enumerate(lines[6:]):
2269        fields = line.split()
2270        if verbose and i%((n+10)/10)==0:
2271            print 'Processing row %d of %d' %(i, nrows)
2272
2273        elevation[i, :] = array([float(x) for x in fields])
2274
2275    fid.close()
2276
2277
2278
2279
2280
2281def ferret2sww(basename_in, basename_out = None,
2282               verbose = False,
2283               minlat = None, maxlat = None,
2284               minlon = None, maxlon = None,
2285               mint = None, maxt = None, mean_stage = 0,
2286               origin = None, zscale = 1,
2287               fail_on_NaN = True,
2288               NaN_filler = 0,
2289               elevation = None,
2290               inverted_bathymetry = False
2291               ): #FIXME: Bathymetry should be obtained
2292                                  #from MOST somehow.
2293                                  #Alternatively from elsewhere
2294                                  #or, as a last resort,
2295                                  #specified here.
2296                                  #The value of -100 will work
2297                                  #for the Wollongong tsunami
2298                                  #scenario but is very hacky
2299    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2300    sww format native to pyvolution.
2301
2302    Specify only basename_in and read files of the form
2303    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2304    relative height, x-velocity and y-velocity, respectively.
2305
2306    Also convert latitude and longitude to UTM. All coordinates are
2307    assumed to be given in the GDA94 datum.
2308
2309    min's and max's: If omitted - full extend is used.
2310    To include a value min may equal it, while max must exceed it.
2311    Lat and lon are assuemd to be in decimal degrees
2312
2313    origin is a 3-tuple with geo referenced
2314    UTM coordinates (zone, easting, northing)
2315
2316    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2317    which means that longitude is the fastest
2318    varying dimension (row major order, so to speak)
2319
2320    ferret2sww uses grid points as vertices in a triangular grid
2321    counting vertices from lower left corner upwards, then right
2322    """
2323
2324    import os
2325    from Scientific.IO.NetCDF import NetCDFFile
2326    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2327    from Numeric import allclose, around
2328
2329    precision = Float
2330
2331    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2332
2333    if minlat != None:
2334        assert -90 < minlat < 90 , msg
2335    if maxlat != None:
2336        assert -90 < maxlat < 90 , msg
2337    if minlon != None:
2338        assert -180 < minlon < 180 , msg
2339    if maxlon != None:
2340        assert -180 < maxlon < 180 , msg
2341
2342
2343    #Get NetCDF data
2344    if verbose: print 'Reading files %s_*.nc' %basename_in
2345    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2346    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2347    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2348    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2349
2350    if basename_out is None:
2351        swwname = basename_in + '.sww'
2352    else:
2353        swwname = basename_out + '.sww'
2354
2355#    Get dimensions of file_h
2356    for dimension in file_h.dimensions.keys():
2357        if dimension[:3] == 'LON':
2358            dim_h_longitude = dimension
2359        if dimension[:3] == 'LAT':
2360            dim_h_latitude = dimension
2361        if dimension[:4] == 'TIME':
2362            dim_h_time = dimension
2363
2364#    print 'long:', dim_h_longitude
2365#    print 'lats:', dim_h_latitude
2366#    print 'times:', dim_h_time
2367
2368    times = file_h.variables[dim_h_time]
2369    latitudes = file_h.variables[dim_h_latitude]
2370    longitudes = file_h.variables[dim_h_longitude]
2371   
2372# get dimensions for file_e
2373    for dimension in file_e.dimensions.keys():
2374        if dimension[:3] == 'LON':
2375            dim_e_longitude = dimension
2376        if dimension[:3] == 'LAT':
2377            dim_e_latitude = dimension
2378        if dimension[:4] == 'TIME':
2379            dim_e_time = dimension
2380
2381# get dimensions for file_u
2382    for dimension in file_u.dimensions.keys():
2383        if dimension[:3] == 'LON':
2384            dim_u_longitude = dimension
2385        if dimension[:3] == 'LAT':
2386            dim_u_latitude = dimension
2387        if dimension[:4] == 'TIME':
2388            dim_u_time = dimension
2389           
2390# get dimensions for file_v
2391    for dimension in file_v.dimensions.keys():
2392        if dimension[:3] == 'LON':
2393            dim_v_longitude = dimension
2394        if dimension[:3] == 'LAT':
2395            dim_v_latitude = dimension
2396        if dimension[:4] == 'TIME':
2397            dim_v_time = dimension
2398
2399
2400    #Precision used by most for lat/lon is 4 or 5 decimals
2401    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2402    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2403
2404    #Check that files are compatible
2405    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2406    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2407    assert allclose(latitudes, e_lat)
2408
2409    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2410    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2411    assert allclose(longitudes, e_lon)
2412
2413    if mint == None:
2414        jmin = 0
2415    else:
2416        jmin = searchsorted(times, mint)
2417
2418    if maxt == None:
2419        jmax=len(times)
2420    else:
2421        jmax = searchsorted(times, maxt)
2422
2423    if minlat == None:
2424        kmin=0
2425    else:
2426        kmin = searchsorted(latitudes, minlat)
2427
2428    if maxlat == None:
2429        kmax = len(latitudes)
2430    else:
2431        kmax = searchsorted(latitudes, maxlat)
2432
2433    if minlon == None:
2434        lmin=0
2435    else:
2436        lmin = searchsorted(longitudes, minlon)
2437
2438    if maxlon == None:
2439        lmax = len(longitudes)
2440    else:
2441        lmax = searchsorted(longitudes, maxlon)
2442
2443#    print' j', jmin, jmax
2444    times = times[jmin:jmax]
2445    latitudes = latitudes[kmin:kmax]
2446    longitudes = longitudes[lmin:lmax]
2447
2448
2449    if verbose: print 'cropping'
2450    zname = 'ELEVATION'
2451
2452    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2453    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2454    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2455    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2456
2457    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2458    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2459    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2460    #        from Numeric import asarray
2461    #        elevations=elevations.tolist()
2462    #        elevations.reverse()
2463    #        elevations=asarray(elevations)
2464    #    else:
2465    #        from Numeric import asarray
2466    #        elevations=elevations.tolist()
2467    #        elevations.reverse()
2468    #        elevations=asarray(elevations)
2469    #        'print hmmm'
2470
2471
2472
2473    #Get missing values
2474    nan_ha = file_h.variables['HA'].missing_value[0]
2475    nan_ua = file_u.variables['UA'].missing_value[0]
2476    nan_va = file_v.variables['VA'].missing_value[0]
2477    if hasattr(file_e.variables[zname],'missing_value'):
2478        nan_e  = file_e.variables[zname].missing_value[0]
2479    else:
2480        nan_e = None
2481
2482    #Cleanup
2483    from Numeric import sometrue
2484
2485    missing = (amplitudes == nan_ha)
2486    if sometrue (missing):
2487        if fail_on_NaN:
2488            msg = 'NetCDFFile %s contains missing values'\
2489                  %(basename_in+'_ha.nc')
2490            raise DataMissingValuesError, msg
2491        else:
2492            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2493
2494    missing = (uspeed == nan_ua)
2495    if sometrue (missing):
2496        if fail_on_NaN:
2497            msg = 'NetCDFFile %s contains missing values'\
2498                  %(basename_in+'_ua.nc')
2499            raise DataMissingValuesError, msg
2500        else:
2501            uspeed = uspeed*(missing==0) + missing*NaN_filler
2502
2503    missing = (vspeed == nan_va)
2504    if sometrue (missing):
2505        if fail_on_NaN:
2506            msg = 'NetCDFFile %s contains missing values'\
2507                  %(basename_in+'_va.nc')
2508            raise DataMissingValuesError, msg
2509        else:
2510            vspeed = vspeed*(missing==0) + missing*NaN_filler
2511
2512
2513    missing = (elevations == nan_e)
2514    if sometrue (missing):
2515        if fail_on_NaN:
2516            msg = 'NetCDFFile %s contains missing values'\
2517                  %(basename_in+'_e.nc')
2518            raise DataMissingValuesError, msg
2519        else:
2520            elevations = elevations*(missing==0) + missing*NaN_filler
2521
2522    #######
2523
2524
2525
2526    number_of_times = times.shape[0]
2527    number_of_latitudes = latitudes.shape[0]
2528    number_of_longitudes = longitudes.shape[0]
2529
2530    assert amplitudes.shape[0] == number_of_times
2531    assert amplitudes.shape[1] == number_of_latitudes
2532    assert amplitudes.shape[2] == number_of_longitudes
2533
2534    if verbose:
2535        print '------------------------------------------------'
2536        print 'Statistics:'
2537        print '  Extent (lat/lon):'
2538        print '    lat in [%f, %f], len(lat) == %d'\
2539              %(min(latitudes.flat), max(latitudes.flat),
2540                len(latitudes.flat))
2541        print '    lon in [%f, %f], len(lon) == %d'\
2542              %(min(longitudes.flat), max(longitudes.flat),
2543                len(longitudes.flat))
2544        print '    t in [%f, %f], len(t) == %d'\
2545              %(min(times.flat), max(times.flat), len(times.flat))
2546
2547        q = amplitudes.flat
2548        name = 'Amplitudes (ha) [cm]'
2549        print %s in [%f, %f]' %(name, min(q), max(q))
2550
2551        q = uspeed.flat
2552        name = 'Speeds (ua) [cm/s]'
2553        print %s in [%f, %f]' %(name, min(q), max(q))
2554
2555        q = vspeed.flat
2556        name = 'Speeds (va) [cm/s]'
2557        print %s in [%f, %f]' %(name, min(q), max(q))
2558
2559        q = elevations.flat
2560        name = 'Elevations (e) [m]'
2561        print %s in [%f, %f]' %(name, min(q), max(q))
2562
2563
2564    #print number_of_latitudes, number_of_longitudes
2565    number_of_points = number_of_latitudes*number_of_longitudes
2566    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2567
2568
2569    file_h.close()
2570    file_u.close()
2571    file_v.close()
2572    file_e.close()
2573
2574
2575    # NetCDF file definition
2576    outfile = NetCDFFile(swwname, 'w')
2577
2578    #Create new file
2579    outfile.institution = 'Geoscience Australia'
2580    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2581                          %(basename_in + '_ha.nc',
2582                            basename_in + '_ua.nc',
2583                            basename_in + '_va.nc',
2584                            basename_in + '_e.nc')
2585
2586
2587    #For sww compatibility
2588    outfile.smoothing = 'Yes'
2589    outfile.order = 1
2590
2591    #Start time in seconds since the epoch (midnight 1/1/1970)
2592    outfile.starttime = starttime = times[0]
2593    times = times - starttime  #Store relative times
2594
2595    # dimension definitions
2596    outfile.createDimension('number_of_volumes', number_of_volumes)
2597
2598    outfile.createDimension('number_of_vertices', 3)
2599    outfile.createDimension('number_of_points', number_of_points)
2600
2601
2602    #outfile.createDimension('number_of_timesteps', len(times))
2603    outfile.createDimension('number_of_timesteps', len(times))
2604
2605    # variable definitions
2606    outfile.createVariable('x', precision, ('number_of_points',))
2607    outfile.createVariable('y', precision, ('number_of_points',))
2608    outfile.createVariable('elevation', precision, ('number_of_points',))
2609
2610    #FIXME: Backwards compatibility
2611    outfile.createVariable('z', precision, ('number_of_points',))
2612    #################################
2613
2614    outfile.createVariable('volumes', Int, ('number_of_volumes',
2615                                            'number_of_vertices'))
2616
2617    outfile.createVariable('time', precision,
2618                           ('number_of_timesteps',))
2619
2620    outfile.createVariable('stage', precision,
2621                           ('number_of_timesteps',
2622                            'number_of_points'))
2623
2624    outfile.createVariable('xmomentum', precision,
2625                           ('number_of_timesteps',
2626                            'number_of_points'))
2627
2628    outfile.createVariable('ymomentum', precision,
2629                           ('number_of_timesteps',
2630                            'number_of_points'))
2631
2632
2633    #Store
2634    from coordinate_transforms.redfearn import redfearn
2635    x = zeros(number_of_points, Float)  #Easting
2636    y = zeros(number_of_points, Float)  #Northing
2637
2638
2639    if verbose: print 'Making triangular grid'
2640    #Check zone boundaries
2641    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2642
2643    vertices = {}
2644    i = 0
2645    for k, lat in enumerate(latitudes):       #Y direction
2646        for l, lon in enumerate(longitudes):  #X direction
2647
2648            vertices[l,k] = i
2649
2650            zone, easting, northing = redfearn(lat,lon)
2651
2652            msg = 'Zone boundary crossed at longitude =', lon
2653            #assert zone == refzone, msg
2654            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2655            x[i] = easting
2656            y[i] = northing
2657            i += 1
2658
2659
2660    #Construct 2 triangles per 'rectangular' element
2661    volumes = []
2662    for l in range(number_of_longitudes-1):    #X direction
2663        for k in range(number_of_latitudes-1): #Y direction
2664            v1 = vertices[l,k+1]
2665            v2 = vertices[l,k]
2666            v3 = vertices[l+1,k+1]
2667            v4 = vertices[l+1,k]
2668
2669            volumes.append([v1,v2,v3]) #Upper element
2670            volumes.append([v4,v3,v2]) #Lower element
2671
2672    volumes = array(volumes)
2673
2674    if origin == None:
2675        zone = refzone
2676        xllcorner = min(x)
2677        yllcorner = min(y)
2678    else:
2679        zone = origin[0]
2680        xllcorner = origin[1]
2681        yllcorner = origin[2]
2682
2683
2684    outfile.xllcorner = xllcorner
2685    outfile.yllcorner = yllcorner
2686    outfile.zone = zone
2687
2688
2689    if elevation is not None:
2690        z = elevation
2691    else:
2692        if inverted_bathymetry:
2693            z = -1*elevations
2694        else:
2695            z = elevations
2696    #FIXME: z should be obtained from MOST and passed in here
2697
2698    from Numeric import resize
2699    z = resize(z,outfile.variables['z'][:].shape)
2700    outfile.variables['x'][:] = x - xllcorner
2701    outfile.variables['y'][:] = y - yllcorner
2702    outfile.variables['z'][:] = z             #FIXME HACK
2703    outfile.variables['elevation'][:] = z
2704    outfile.variables['time'][:] = times   #Store time relative
2705    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2706
2707
2708
2709    #Time stepping
2710    stage = outfile.variables['stage']
2711    xmomentum = outfile.variables['xmomentum']
2712    ymomentum = outfile.variables['ymomentum']
2713
2714    if verbose: print 'Converting quantities'
2715    n = len(times)
2716    for j in range(n):
2717        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2718        i = 0
2719        for k in range(number_of_latitudes):      #Y direction
2720            for l in range(number_of_longitudes): #X direction
2721                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2722                stage[j,i] = w
2723                h = w - z[i]
2724                xmomentum[j,i] = uspeed[j,k,l]/100*h
2725                ymomentum[j,i] = vspeed[j,k,l]/100*h
2726                i += 1
2727
2728    #outfile.close()
2729
2730    #FIXME: Refactor using code from file_function.statistics
2731    #Something like print swwstats(swwname)
2732    if verbose:
2733        x = outfile.variables['x'][:]
2734        y = outfile.variables['y'][:]
2735        times = outfile.variables['time'][:]
2736        print '------------------------------------------------'
2737        print 'Statistics of output file:'
2738        print '  Name: %s' %swwname
2739        print '  Reference:'
2740        print '    Lower left corner: [%f, %f]'\
2741              %(xllcorner, yllcorner)
2742        print '    Start time: %f' %starttime
2743        print '  Extent:'
2744        print '    x [m] in [%f, %f], len(x) == %d'\
2745              %(min(x.flat), max(x.flat), len(x.flat))
2746        print '    y [m] in [%f, %f], len(y) == %d'\
2747              %(min(y.flat), max(y.flat), len(y.flat))
2748        print '    t [s] in [%f, %f], len(t) == %d'\
2749              %(min(times), max(times), len(times))
2750        print '  Quantities [SI units]:'
2751        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2752            q = outfile.variables[name][:].flat
2753            print '    %s in [%f, %f]' %(name, min(q), max(q))
2754
2755
2756
2757    outfile.close()
2758
2759
2760
2761
2762
2763def timefile2netcdf(filename, quantity_names = None):
2764    """Template for converting typical text files with time series to
2765    NetCDF tms file.
2766
2767
2768    The file format is assumed to be either two fields separated by a comma:
2769
2770        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
2771
2772    E.g
2773
2774      31/08/04 00:00:00, 1.328223 0 0
2775      31/08/04 00:15:00, 1.292912 0 0
2776
2777    will provide a time dependent function f(t) with three attributes
2778
2779    filename is assumed to be the rootname with extenisons .txt and .sww
2780    """
2781
2782    import time, calendar
2783    from Numeric import array
2784    from config import time_format
2785    from utilities.numerical_tools import ensure_numeric
2786
2787
2788
2789    fid = open(filename + '.txt')
2790    line = fid.readline()
2791    fid.close()
2792
2793    fields = line.split(',')
2794    msg = 'File %s must have the format date, value0 value1 value2 ...'
2795    assert len(fields) == 2, msg
2796
2797    try:
2798        starttime = calendar.timegm(time.strptime(fields[0], time_format))
2799    except ValueError:
2800        msg = 'First field in file %s must be' %filename
2801        msg += ' date-time with format %s.\n' %time_format
2802        msg += 'I got %s instead.' %fields[0]
2803        raise DataTimeError, msg
2804
2805
2806    #Split values
2807    values = []
2808    for value in fields[1].split():
2809        values.append(float(value))
2810
2811    q = ensure_numeric(values)
2812
2813    msg = 'ERROR: File must contain at least one independent value'
2814    assert len(q.shape) == 1, msg
2815
2816
2817
2818    #Read times proper
2819    from Numeric import zeros, Float, alltrue
2820    from config import time_format
2821    import time, calendar
2822
2823    fid = open(filename + '.txt')
2824    lines = fid.readlines()
2825    fid.close()
2826
2827    N = len(lines)
2828    d = len(q)
2829
2830    T = zeros(N, Float)       #Time
2831    Q = zeros((N, d), Float)  #Values
2832
2833    for i, line in enumerate(lines):
2834        fields = line.split(',')
2835        realtime = calendar.timegm(time.strptime(fields[0], time_format))
2836
2837        T[i] = realtime - starttime
2838
2839        for j, value in enumerate(fields[1].split()):
2840            Q[i, j] = float(value)
2841
2842    msg = 'File %s must list time as a monotonuosly ' %filename
2843    msg += 'increasing sequence'
2844    assert alltrue( T[1:] - T[:-1] > 0 ), msg
2845
2846
2847    #Create NetCDF file
2848    from Scientific.IO.NetCDF import NetCDFFile
2849
2850    fid = NetCDFFile(filename + '.tms', 'w')
2851
2852
2853    fid.institution = 'Geoscience Australia'
2854    fid.description = 'Time series'
2855
2856
2857    #Reference point
2858    #Start time in seconds since the epoch (midnight 1/1/1970)
2859    #FIXME: Use Georef
2860    fid.starttime = starttime
2861
2862
2863    # dimension definitions
2864    #fid.createDimension('number_of_volumes', self.number_of_volumes)
2865    #fid.createDimension('number_of_vertices', 3)
2866
2867
2868    fid.createDimension('number_of_timesteps', len(T))
2869
2870    fid.createVariable('time', Float, ('number_of_timesteps',))
2871
2872    fid.variables['time'][:] = T
2873
2874    for i in range(Q.shape[1]):
2875        try:
2876            name = quantity_names[i]
2877        except:
2878            name = 'Attribute%d'%i
2879
2880        fid.createVariable(name, Float, ('number_of_timesteps',))
2881        fid.variables[name][:] = Q[:,i]
2882
2883    fid.close()
2884
2885
def extent_sww(file_name):
    """Determine spatial extent and stage range of an sww file.

    Input;
    file_name - the sww file

    Output;
    A list [xmin, xmax, ymin, ymax, stage_min, stage_max] where x and y
    are the vertex coordinates and stage is taken over all timesteps
    and vertices.
    """

    from Scientific.IO.NetCDF import NetCDFFile

    # Open the file read-only and pull out the arrays we need
    fid = NetCDFFile(file_name, 'r')

    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stages = fid.variables['stage'][:]

    fid.close()

    extent = [min(xs), max(xs), min(ys), max(ys)]
    extent.append(min(stages.flat))
    extent.append(max(stages.flat))
    return extent
2920
2921
def sww2domain(filename,boundary=None,t=None,\
               fail_if_NaN=True,NaN_filler=0\
               ,verbose = False,very_verbose = False):
    """
    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    Reconstruct a shallow_water Domain from a stored sww file, with all
    time-dependent (conserved) quantities interpolated to time t and all
    static quantities copied across.

    fail_if_NaN - if True, raise DataMissingValuesError when a quantity
    contains its file-declared missing_value; if False, replace such
    entries with NaN_filler instead.

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatable with the coordinates, and will
    give a different final boundary, or crash.
    """
    NaN=9.969209968386869e+036
    #initialise NaN: default missing-value marker, overridden per
    #quantity below by the variable's own missing_value attribute.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
    time = fid.variables['time']       #Timesteps
    if t is None:
        t = time[-1]
    time_interp = get_time_interp(time,t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]             #x-coordinates of vertices
    y = fid.variables['y'][:]             #y-coordinates of vertices
    elevation = fid.variables['elevation']     #Elevation
    stage = fid.variables['stage']     #Water level
    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    coordinates=transpose(asarray([x.tolist(),y.tolist()]))

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    #NOTE(review): Geo_reference is not imported in this function -
    #presumably supplied by a module-level import; verify at file top.
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    if verbose: print '    getting quantities'
    #Split variables into time-dependent (conserved) quantities, which
    #are interpolated to time t, and static (other) quantities
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity]=\
                  interpolated_quantity(fid.variables[quantity][:],time_interp)
        else: other_quantities.append(quantity)

    #Exclude structural variables - they are not domain quantities
    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    #   From shallow_water.Domain:
    coordinates=coordinates.tolist()
    volumes=volumes.tolist()
    #FIXME:should this be in mesh?(peter row)
    #Non-smoothed files store one coordinate per triangle vertex, so
    #duplicate points are weeded out before building the domain
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)


    try:
        domain = Domain(coordinates, volumes, boundary)
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
        raise DataDomainError, msg

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    #Domain starts at the requested instant; model time restarts at 0
    domain.starttime=float(starttime)+float(t)
    domain.time=0.0

    for quantity in other_quantities:
        try:
            #Use the variable's declared missing_value as the NaN marker
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise DataMissingValuesError, msg
            else:
                #Mask out flagged entries and substitute NaN_filler
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Regroup flat per-vertex values into one row per triangle
            X = resize(X,(len(X)/3,3))
        domain.set_quantity(quantity,X)
    #
    #Same treatment for the time-interpolated conserved quantities
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise DataMissingValuesError, msg
            else:
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            X = resize(X,(X.shape[0]/3,3))
        domain.set_quantity(quantity,X)

    fid.close()
    return domain
3070
def interpolated_quantity(saved_quantity,time_interp):
    """Interpolate a time-indexed quantity at a fractional timestep.

    time_interp is a pair (index, ratio) as produced by get_time_interp:
    ratio == 0 selects the stored frame at 'index' exactly, otherwise the
    result is a linear blend of the frames at index and index+1.

    Returns the vector (or scalar) of interpolated values.
    """
    idx, frac = time_interp
    if frac > 0:
        # Linear interpolation between the two bracketing timesteps
        return (1 - frac)*saved_quantity[idx] + frac*saved_quantity[idx + 1]
    # Exact hit on a stored timestep
    return saved_quantity[idx]
3082
3083def get_time_interp(time,t=None):
3084    #Finds the ratio and index for time interpolation.
3085    #It is borrowed from previous pyvolution code.
3086    if t is None:
3087        t=time[-1]
3088        index = -1
3089        ratio = 0.
3090    else:
3091        T = time
3092        tau = t
3093        index=0
3094        msg = 'Time interval derived from file %s [%s:%s]'\
3095            %('FIXMEfilename', T[0], T[-1])
3096        msg += ' does not match model time: %s' %tau
3097        if tau < time[0]: raise DataTimeError, msg
3098        if tau > time[-1]: raise DataTimeError, msg
3099        while tau > time[index]: index += 1
3100        while tau < time[index]: index -= 1
3101        if tau == time[index]:
3102            #Protect against case where tau == time[-1] (last time)
3103            # - also works in general when tau == time[i]
3104            ratio = 0
3105        else:
3106            #t is now between index and index+1
3107            ratio = (tau - time[index])/(time[index+1] - time[index])
3108    return (index,ratio)
3109
3110
def weed(coordinates,volumes,boundary = None):
    """Remove duplicated points from a mesh description.

    coordinates - list (or Numeric array) of [x, y] vertex coordinates,
                  possibly containing repeated points
    volumes     - list (or Numeric array) of triangles, each a list of
                  three indices into coordinates (entries <= -1 are left
                  untouched)
    boundary    - optional dict mapping (point_index, point_index)
                  segments to labels

    Returns (coordinates, volumes, boundary) where coordinates holds each
    distinct point once, volume indices are remapped accordingly, and
    boundary segments are re-keyed to the new indices.  When both
    orientations of a segment collapse onto an existing one, the existing
    label is kept.
    #FIXME should the boundary attributes be concatenated
    #('exterior, pond') or replaced ('pond')(peter row)
    """
    # Accept Numeric arrays as well as lists.  (The previous check
    # compared the type object itself against the string 'array', which
    # was always false; compare the type's name instead.)
    if type(coordinates).__name__ == 'array':
        coordinates = coordinates.tolist()
    if type(volumes).__name__ == 'array':
        volumes = volumes.tolist()

    # point_dict maps each distinct point to its first index;
    # same_point maps every original index to its point value
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point in point_dict:
            # Duplicate: all references to i will be redirected to the
            # first occurrence of this point
            same_point[i] = point
        else:
            point_dict[point] = i
            same_point[i] = point

    # Rebuild the coordinate list with each point appearing once and
    # renumber point_dict to the new, compact indices
    coordinates = []
    i = 0
    for point in list(point_dict.keys()):
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    # Remap triangle vertex indices in place (negative entries are
    # sentinels and left alone)
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    if not boundary is None:
        # Re-key boundary segments to the new point indices; when a
        # segment (in either orientation) is already present, keep the
        # existing label (see FIXME in the docstring)
        new_boundary = {}
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]

            if (point0, point1) in new_boundary:
                new_boundary[(point0,point1)] = new_boundary[(point0,point1)]
            elif (point1, point0) in new_boundary:
                new_boundary[(point1,point0)] = new_boundary[(point1,point0)]
            else:
                new_boundary[(point0,point1)] = label

        boundary = new_boundary

    return coordinates,volumes,boundary
3166
3167
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.

    basename_in  - input file name without the .dem extension
    stencil      - 2D array of weights; each output cell is the weighted
                   sum of the input cells under the stencil window
    cellsize_new - output cell size (assumed an integer multiple of the
                   input cellsize)
    basename_out - optional output basename; default appends the new
                   cellsize to basename_in

    Output cells whose stencil window contains any NODATA_value are set
    to NODATA_value.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata (scalar attributes are indexed with [0] - presumably
    #stored as length-1 arrays in this format; see the writer)
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out == None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    #(integer division throughout - this is Python 2 code)
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                           'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    #Shift the origin so the first output cell corresponds to the
    #centre of the first stencil window, not its corner
    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    #View the flat input elevation as a (nrows, ncols) grid
    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data, one decimated row at a time
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev =  zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        #Write the completed row to the output variable
        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
3294
3295
3296
3297
def tsh2sww(filename, verbose=False): #test_tsh2sww
    """
    to check if a tsh/msh file 'looks' good.

    Builds a Domain from the given mesh file and stores its connectivity
    and initial stage to an sww file with the same base name, written to
    the directory the mesh file lives in.
    """

    #FIXME Move to data_manager
    from shallow_water import Domain
    from pmesh2domain import pmesh_to_domain_instance
    import time, os
    from data_manager import get_dataobject
    from os import sep, path
    from utilities.numerical_tools import mean

    if verbose == True:print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose == True:print "Number of triangles = ", len(domain)

    domain.smooth = True
    domain.format = 'sww'   #Native netcdf visualisation format
    #Derive the sww base name and output directory from the mesh path
    file_path, filename = path.split(filename)
    filename, ext = path.splitext(filename)
    domain.filename = filename
    domain.reduction = mean
    if verbose == True:print "file_path",file_path
    if file_path == "":file_path = "."
    domain.set_datadir(file_path)

    if verbose == True:
        print "Output written to " + domain.get_datadir() + sep + \
              domain.filename + "." + domain.format
    sww = get_dataobject(domain)

    #Store the mesh and the stage quantity at the initial timestep
    sww.store_connectivity()
    sww.store_timestep('stage')
3331
3332
def asc_csiro2sww(bath_dir,
                  elevation_dir,
                  ucur_dir,
                  vcur_dir,
                  sww_file,
                  minlat = None, maxlat = None,
                  minlon = None, maxlon = None,
                  zscale=1,
                  mean_stage = 0,
                  fail_on_NaN = True,
                  elevation_NaN_filler = 0,
                  bath_prefix='ba',
                  elevation_prefix='el',
                  verbose=False):
    """
    Produce an sww boundary file, from esri ascii data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    assume:
    All files are in esri ascii format

    4 types of information
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions
    The metadata of all the files is the same
    Each type is in a seperate directory
    One bath file with extention .000
    The time period is less than 24hrs and uniform.

    NOTE(review): Float, Int, Int32, array, zeros, resize, sometrue and
    Geo_reference are used below but not imported in this function -
    presumably supplied by module-level imports; verify at the top of
    this file.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from coordinate_transforms.redfearn import redfearn

    precision = Float # So if we want to change the precision its done here

    # go in to the bath dir and load the only file,
    bath_files = os.listdir(bath_dir)

    #fixme: make more general?
    bath_file = bath_files[0]
    bath_dir_file =  bath_dir + os.sep + bath_file
    bath_metadata,bath_grid =  _read_asc(bath_dir_file)

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    base_start = bath_file[-12:]

    #go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
                         + base_start
    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)

    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)

    # the first elevation file should be the
    # file with the same base name as the bath data
    assert elevation_files[0] == 'el' + base_start

    #assert bath_metadata == elevation_metadata



    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    #number_of_times = len(os.listdir(elevation_dir))
    #number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    #Reconstruct the grid axes from the lower-left corner and cellsize
    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
                  for x in range(number_of_longitudes)]
    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
                 for y in range(number_of_latitudes)]

    # reverse order of lat, so the first lat represents the first grid row
    latitudes.reverse()

    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
                                                 minlat=minlat, maxlat=maxlat,
                                                 minlon=minlon, maxlon=maxlon)


    #Clip grid and axes to the requested lat/lon window and recompute
    #the derived counts
    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    #Work out the times
    if len(elevation_files) > 1:
        # Assume: The time period is less than 24hrs.
        # The last 3 characters of the file names encode hours;
        # convert the spacing between the first two files to seconds.
        time_period = (int(elevation_files[1][-3:]) - \
                      int(elevation_files[0][-3:]))*60*60
        times = [x*time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]


    if verbose:
        print '------------------------------------------------'
        print 'Statistics:'
        print '  Extent (lat/lon):'
        print '    lat in [%f, %f], len(lat) == %d'\
              %(min(latitudes), max(latitudes),
                len(latitudes))
        print '    lon in [%f, %f], len(lon) == %d'\
              %(min(longitudes), max(longitudes),
                len(longitudes))
        print '    t in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))

    ######### WRITE THE SWW FILE #############
    # NetCDF file definition
    outfile = NetCDFFile(sww_file, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'


    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    #Start time in seconds since the epoch (midnight 1/1/1970)
    #(chained assignment also keeps a local copy of the start time)
    outfile.starttime = starttime = times[0]


    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)

    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    #Store
    from coordinate_transforms.redfearn import redfearn
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print 'Making triangular grid'
    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0],longitudes[0])

    #Convert every grid node from lat/lon to UTM and remember its
    #linear index in vertices[(column, row)]
    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat,lon)

            #NOTE(review): msg is a tuple, not a string, and the zone
            #assertion below is disabled - confirm this is intended
            msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            x[i] = easting
            y[i] = northing
            i += 1


    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            #Note, this is different to the ferrit2sww code
            #since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) #Upper element
            volumes.append([v4,v2,v3]) #Lower element

    volumes = array(volumes)

    #Georeference anchored at the minimum easting/northing of the data
    geo_ref = Geo_reference(refzone,min(x),min(y))
    geo_ref.write_NetCDF(outfile)

    # This will put the geo ref in the middle
    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)


    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (/lon):'
        print '    x in [%f, %f], len(lat) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(lon) == %d'\
              %(min(y), max(y),
                len(y))
        print 'geo_ref: ',geo_ref

    #Store coordinates (relative to the georeference) and bathymetry
    z = resize(bath_grid,outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

    # do this to create an ok sww file.
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z
    # put the next line up in the code after outfile.order = 1
    #number_of_times = 1

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
           _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])

        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue (missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                raise DataMissingValuesError, msg
            else:
                #Zero out flagged cells, then add the filler value there
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler


        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                #stage = scaled elevation + mean stage;
                #momenta = velocity * water depth (h = w - z)
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()
3640
3641def _get_min_max_indexes(latitudes,longitudes,
3642                        minlat=None, maxlat=None,
3643                        minlon=None, maxlon=None):
3644    """
3645    return max, min indexes (for slicing) of the lat and long arrays to cover the area
3646    specified with min/max lat/long
3647
3648    Think of the latitudes and longitudes describing a 2d surface.
3649    The area returned is, if possible, just big enough to cover the
3650    inputed max/min area. (This will not be possible if the max/min area
3651    has a section outside of the latitudes/longitudes area.)
3652
3653    assume latitudess & longitudes are sorted,
3654    long - from low to high (west to east, eg 148 - 151)
3655    lat - from high to low (north to south, eg -35 - -38)
3656    """
3657
3658    # reverse order of lat, so it's in ascending order
3659    latitudes.reverse()
3660    largest_lat_index = len(latitudes)-1
3661    #Cut out a smaller extent.
3662    if minlat == None:
3663        lat_min_index = 0
3664    else:
3665        lat_min_index = searchsorted(latitudes, minlat)-1
3666        if lat_min_index <0:
3667            lat_min_index = 0
3668
3669
3670    if maxlat == None:
3671        lat_max_index = largest_lat_index #len(latitudes)
3672    else:
3673        lat_max_index = searchsorted(latitudes, maxlat)
3674        if lat_max_index > largest_lat_index:
3675            lat_max_index = largest_lat_index
3676
3677    if minlon == None:
3678        lon_min_index = 0
3679    else:
3680        lon_min_index = searchsorted(longitudes, minlon)-1
3681        if lon_min_index <0:
3682            lon_min_index = 0
3683
3684    if maxlon == None:
3685        lon_max_index = len(longitudes)
3686    else:
3687        lon_max_index = searchsorted(longitudes, maxlon)
3688
3689    #Take into account that the latitude list was reversed
3690    latitudes.reverse() # Python passes by reference, need to swap back
3691    lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3692                                   largest_lat_index - lat_min_index
3693    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3694    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3695
3696    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3697
3698
3699def _read_asc(filename, verbose=False):
3700    """Read esri file from the following ASCII format (.asc)
3701
3702    Example:
3703
3704    ncols         3121
3705    nrows         1800
3706    xllcorner     722000
3707    yllcorner     5893000
3708    cellsize      25
3709    NODATA_value  -9999
3710    138.3698 137.4194 136.5062 135.5558 ..........
3711
3712    """
3713
3714    datafile = open(filename)
3715
3716    if verbose: print 'Reading DEM from %s' %(filename)
3717    lines = datafile.readlines()
3718    datafile.close()
3719
3720    if verbose: print 'Got', len(lines), ' lines'
3721
3722    ncols = int(lines.pop(0).split()[1].strip())
3723    nrows = int(lines.pop(0).split()[1].strip())
3724    xllcorner = float(lines.pop(0).split()[1].strip())
3725    yllcorner = float(lines.pop(0).split()[1].strip())
3726    cellsize = float(lines.pop(0).split()[1].strip())
3727    NODATA_value = float(lines.pop(0).split()[1].strip())
3728
3729    assert len(lines) == nrows
3730
3731    #Store data
3732    grid = []
3733
3734    n = len(lines)
3735    for i, line in enumerate(lines):
3736        cells = line.split()
3737        assert len(cells) == ncols
3738        grid.append(array([float(x) for x in cells]))
3739    grid = array(grid)
3740
3741    return {'xllcorner':xllcorner,
3742            'yllcorner':yllcorner,
3743            'cellsize':cellsize,
3744            'NODATA_value':NODATA_value}, grid
3745
3746
# FIXME (Ole): Is this doing anything at all?
# NOTE(review): This function is an unfinished stub.  Apart from
# defaulting `quantity`, it performs no work: the gauge file is never
# read and no plotting or export happens.  Worse, `f` below is never
# defined, so calling this with time_min or time_max left as None
# raises NameError.  Presumably `f` was meant to be the result of
# file_function(swwfile) -- confirm before completing/using this.
def sww2timeseries(swwfile,
                   gauge_filename,
                   gauge_data_outname,
                   quantity = None,
                   time_min = None,
                   time_max = None,
                   verbose = False):

    """Read SWW file and extract time series for prescribed quantities at
    gauge locations.

    WARNING (review note): this is an incomplete stub -- see the FIXME
    above.  It currently does nothing useful and crashes with NameError
    unless both time_min and time_max are supplied.

    The gauge locations are defined in gauge_filename. This file should be
    in the form: gaugename, easting, northing, and should be stored in a
    .csv or .xya file.

    Time series data at the gauges can be written to file (for example,
    Benfield requested raw data) with the default being no data written.

    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'depth'.

    The user can define a list of quantities. The possibilities are
    the conserved quantitues of the shallow water wave equation and other
    quantities which can be derived from those, i.e.
    ['depth', 'xmomentum', 'ymomentum', 'momentum', 'velocity', 'bearing'].

    Momentum is the absolute momentum, sqrt(xmomentum^2 + ymomentum^2).
    Note, units of momentum are m^2/s and depth is m.

    Velocity is absolute momentum divided by depth. (Confirming correct units:
    vel = abs mom / depth = (m^2/s)/m = m/s.)

    Bearing returns the angle of the velocity vector from North.

    If time_min and time_max is given, output plotted in that time range.
    The default is to plot the entire range of the time evident in sww file.

    The export graphic format in 'png' and will be stored in the same
    directory as the input file.
    """

    # Only step actually implemented: default the quantity name.
    if quantity is None: quantity = 'depth'

    # TODO: extract gauge locations from gauge file (not implemented)

    # TODO: extract all quantities from swwfile (f = file_function)
    # NOTE(review): `f` is undefined here -- the two lines below raise
    # NameError whenever time_min/time_max default to None.
    if time_min is None: time_min = min(f.get_time())
    if time_max is None: time_max = max(f.get_time())

    # TODO: loop through appropriate range of time;
    # plot prescribed quantities and export data if requested
    # (not implemented)

    #if gauge_data_outname is None:

    # Always returns None; no output is produced.
    return
Note: See TracBrowser for help on using the repository browser.