source: inundation/pyvolution/data_manager.py @ 2924

Last change on this file since 2924 was 2924, checked in by duncan, 18 years ago

changing pyvolution code so fit_interpolate/fit.py is used.

File size: 113.6 KB
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
model output, etc


Formats used within AnuGA:

.sww: NetCDF format for storing model output f(t,x,y)
.tms: NetCDF format for storing time series f(t)

.xya: ASCII format for storing arbitrary points and associated attributes
.pts: NetCDF format for storing arbitrary points and associated attributes

.asc: ASCII format of regular DEMs as output from ArcView
.prj: Associated ArcView file giving more meta data for asc format
.ers: ERMapper header format of regular DEMs for ArcView

.dem: NetCDF representation of regular DEM data

.tsh: ASCII format for storing meshes and associated boundary and region info
.msh: NetCDF format for storing meshes and associated boundary and region info

.nc: Native ferret NetCDF format
.geo: Houdini's ASCII geometry format (?)


A typical dataflow can be described as follows

Manually created files:
ASC, PRJ:     Digital elevation models (gridded)
TSH:          Triangular meshes (e.g. created from pmesh)
NC:           Model outputs for use as boundary conditions (e.g. from MOST)


AUTOMATICALLY CREATED FILES:

ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEMs to native pts file

NC -> SWW: Conversion of MOST boundary files to boundary sww

PTS + TSH -> TSH with elevation: Least squares fit

TSH -> SWW:  Conversion of TSH to sww viewable using Swollen

TSH + Boundary SWW -> SWW: Simulation using pyvolution

"""

import exceptions
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import os

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around


from coordinate_transforms.geo_reference import Geo_reference


def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s


def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode=os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode=os.system('chmod 775 '+path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return(path)

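# Editor's sketch (not part of the original module): typical use of
# check_dir.  The directory name is hypothetical.
def _example_check_dir():
    datadir = check_dir('~/anuga_output', verbose=1)
    #datadir now ends with os.sep (or a fallback such as /tmp/ was
    #substituted), so filenames can be appended directly:
    open(datadir + 'log.txt', 'w').close()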


def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN


def get_files(datadir, filename, format, size):
177    """Get all file (names) with gven name, size and format
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)

    #check_dir returns the path with a trailing separator and
    #create_filename embeds the size as '_size%d'; match that here.
    pattern = dir + filename + '_size%d*.%s' % (size, format)
    return glob.glob(pattern)

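# Editor's sketch (not part of the original module): how create_filename
# and get_files fit together.  Basename and size are made up.
def _example_filenames():
    fn = create_filename('.', 'run1', 'sww', size=4, time=1.5)
    print fn                                 # ./run1_size4_time1.50.sww
    print get_files('.', 'run1', 'sww', 4)   # e.g. ['./run1_size4_time1.50.sww']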


#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
                                        '''   'w' (write)'''+\
                                        '''   'r' (read)''' +\
                                        '''   'a' (append)'''

        #Create filename
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))


        self.filename = create_filename(domain.get_datadir(),
            domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain


        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
        #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
324        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        #minimum_allowed_depth = 0.001
        minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase =  file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #I change domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()

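# Editor's sketch (not part of the original module): how pyvolution
# typically drives Data_format_sww.  'domain' is assumed to be an
# initialised pyvolution domain; the evolve() loop is the assumed API.
def _example_store_sww(domain):
    sww = Data_format_sww(domain, mode='w')
    sww.store_connectivity()   #Constant data: mesh and bed elevation, stored once
    for t in domain.evolve(yieldstep=1.0, finaltime=10.0):   #assumed stepping API
        sww.store_timestep(['stage', 'xmomentum', 'ymomentum'])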

#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)  #Checkpoint files use the .cpt extension


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates of the triangles and their connectivity.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #FIXME: the checkpoint file defines no 'z' variable,
        #so the bed elevation Z is not stored here

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        stage = fid.variables[name]  #Write to the named quantity, not only 'stage'
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        stage[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()




#Class for storing xya output
#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface to the ASCII xya format
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes are assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file
        """
        pass


#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()

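# Editor's sketch (not part of the original module): writing two
# disconnected triangles with write_obj.  Each row holds the three
# vertex values of one element, as the docstring above describes.
def _example_write_obj():
    from Numeric import array, Float
    x = array([[0.0, 1.0, 0.0], [1.0, 1.0, 0.0]], Float)
    y = array([[0.0, 0.0, 1.0], [0.0, 1.0, 1.0]], Float)
    z = array([[0.0, 0.0, 0.0], [0.5, 0.5, 0.5]], Float)
    write_obj('two_triangles', x, y, z)   #Creates two_triangles.obj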

#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)

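# Editor's sketch (not part of the original module): converting an sww
# file to obj.  Basename and size (number of volumes) are hypothetical;
# the input is expected to be named as create_filename would produce it,
# i.e. 'run1_size100.sww' here.
#
#   sww2obj('run1', 100)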

def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    ##i = 0
    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]

        ##i += 1


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info
    #and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(data_dir+os.sep+filename,'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Could not extract time from filename %s' % filename



        ##i = 0
        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]

            ##i += 1

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

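# Editor's sketch (not part of the original module): thinning an sww file
# by keeping every third timestep.  Filenames are hypothetical.
#
#   filter_netcdf('run1.sww', 'run1_coarse.sww', first=0, step=3)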

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

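# Editor's sketch (not part of the original module): for a domain with
# domain.format == 'sww', the call below is equivalent to
# Data_format_sww(domain, 'w').
#
#   writer = get_dataobject(domain)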
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get xya file
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)


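# Editor's sketch (not part of the original module): converting a
# hypothetical bathymetry file 'survey.xya' whose third column holds
# depths; z_func flips the sign so the stored attribute is an elevation.
#
#   xya2pts('survey', basename_out='survey_elevation',
#           stride=2,                  #Keep every second point
#           attribute_name='elevation',
#           z_func=lambda z: -z)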

######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()

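# Editor's sketch (not part of the original module): writing three points
# with an elevation attribute and an explicit georeference (zone and
# corner values are made up).
def _example_write_ptsfile():
    points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]]
    elevation = [10.0, 20.0, 30.0]
    write_ptsfile('three_points.pts', points, elevation,
                  attribute_name='elevation',
                  zone=56, xllcorner=308500.0, yllcorner=6189000.0)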

def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False):
1091    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

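# Editor's sketch (not part of the original module): clipping a DEM while
# converting it to pts.  Basename and bounding box are hypothetical;
# 'topo.dem' is the NetCDF DEM described in the docstring above.
#
#   dem2pts('topo', basename_out='topo_clipped',
#           easting_min=722000, easting_max=730000,
#           northing_min=5893000, northing_max=5900000,
#           use_cache=True, verbose=True)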

def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
1133    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose and nn > 0:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])

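# Editor's sketch (not part of the original module): the generator above
# yields one list of [easting, northing, elevation] triplets per cross
# section, e.g.
#
#   lines = open('river.sdf').readlines()    #Hypothetical HEC-RAS export
#   for section in _read_hecras_cross_sections(lines):
#       print '%d points in this cross-section' %len(section)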


def hecras_cross_sections2pts(basename_in,
                              basename_out=None,
                              verbose=False):
1348    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)

    Example:


# RAS export file created on Mon 15Aug2005 11:42
# by HEC-RAS Version 3.1.1

BEGIN HEADER:
  UNITS: METRIC
  DTM TYPE: TIN
  DTM: v:\1\cit\perth_topo\river_tin
  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
  MAP PROJECTION: UTM
  PROJECTION ZONE: 50
  DATUM: AGD66
  VERTICAL DATUM:
  NUMBER OF REACHES:  19
  NUMBER OF CROSS-SECTIONS:  14206
END HEADER:


Only the SURFACE LINE data of the following form will be utilised

  CROSS-SECTION:
    STREAM ID:Southern-Wungong
    REACH ID:Southern-Wungong
    STATION:19040.*
    CUT LINE:
      405548.671603161 , 6438142.7594925
      405734.536092045 , 6438326.10404912
      405745.130459356 , 6438331.48627354
      405813.89633823 , 6438368.6272789
    SURFACE LINE:
     405548.67,   6438142.76,   35.37
     405552.24,   6438146.28,   35.41
     405554.78,   6438148.78,   35.44
     405555.80,   6438149.79,   35.44
     405559.37,   6438153.31,   35.45
     405560.88,   6438154.81,   35.44
     405562.93,   6438156.83,   35.42
     405566.50,   6438160.35,   35.38
     405566.99,   6438160.83,   35.37
     ...
   END CROSS-SECTION

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get ASCII file
    infile = open(root + '.sdf', 'r')  #Open SDF file for read

    if verbose: print 'Reading DEM from %s' %(root + '.sdf')

    lines = infile.readlines()
    infile.close()

    if verbose: print 'Converting to pts format'

    i = 0
    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
        i += 1

    assert lines[i].strip().upper() == 'BEGIN HEADER:'
    i += 1

    assert lines[i].strip().upper().startswith('UNITS:')
    units = lines[i].strip().split()[1]
    i += 1

    assert lines[i].strip().upper().startswith('DTM TYPE:')
    i += 1

    assert lines[i].strip().upper().startswith('DTM:')
    i += 1

    assert lines[i].strip().upper().startswith('STREAM')
    i += 1

    assert lines[i].strip().upper().startswith('CROSS')
    i += 1

    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
    projection = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
    zone = int(lines[i].strip().split(':')[1])
    i += 1

    assert lines[i].strip().upper().startswith('DATUM:')
    datum = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
    number_of_cross_sections = int(lines[i].strip().split(':')[1])
    i += 1


    #Now read all points
    points = []
    elevation = []
    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
        for k, entry in enumerate(entries):
            points.append(entry[:2])
            elevation.append(entry[2])


    msg = 'Actual number of cross sections %d does not match the %d '\
          'reported in the header' %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg

    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    #FIXME (DSG-ON): use loadASCII export_points_file
    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable ' +\
                          'storage of spatial point data derived from HEC-RAS'

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = 0.0
    outfile.yllcorner = 0.0
    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
    outfile.false_northing = 1000000

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    outfile.variables['points'][:] = points
    outfile.variables['elevation'][:] = elevation

    outfile.close()


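# Editor's sketch (not part of the original module): converting a
# hypothetical HEC-RAS export 'river.sdf' to 'river_points.pts'.
#
#   hecras_cross_sections2pts('river', basename_out='river_points',
#                             verbose=True)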

def sww2dem(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            NODATA_value = -9999,
            easting_min = None,
            easting_max = None,
            northing_min = None,
            northing_max = None,
            verbose = False,
            origin = None,
            datum = 'WGS84',
            format = 'ers'):

1537    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)

    Example (ASC):

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'elevation'.

    if timestep (an index) is given, output quantity at that timestep

    if reduction is given use that to reduce quantity over all timesteps.

    datum

    format can be either 'asc' or 'ers'
    """

    import sys
    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
    from Numeric import array2string

    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
    from util import apply_expression_to_dictionary

    msg = 'Format must be either asc or ers'
    assert format.lower() in ['asc', 'ers'], msg


    false_easting = 500000
    false_northing = 10000000

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    demfile = basename_out + '.' + format
    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option)

    #Read sww file
    if verbose: print 'Reading from %s' %swwfile
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]



    #FIXME: Refactor using code from file_function.statistics
    #Something like print swwstats(swwname)
    if verbose:
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        times = fid.variables['time'][:]
        print '------------------------------------------------'
        print 'Statistics of SWW file:'
        print '  Name: %s' %swwfile
        print '  Reference:'
        print '    Lower left corner: [%f, %f]'\
              %(xllcorner, yllcorner)
        print '    Start time: %f' %fid.starttime[0]
        print '  Extent:'
        print '    x [m] in [%f, %f], len(x) == %d'\
              %(min(x.flat), max(x.flat), len(x.flat))
        print '    y [m] in [%f, %f], len(y) == %d'\
              %(min(y.flat), max(y.flat), len(y.flat))
        print '    t [s] in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))
        print '  Quantities [SI units]:'
        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
            q = fid.variables[name][:].flat
            print '    %s in [%f, %f]' %(name, min(q), max(q))





    # Get quantity and reduce if applicable
    if verbose: print 'Processing quantity %s' %quantity

    # Turn NetCDF objects into Numeric arrays
    quantity_dict = {}
    for name in fid.variables.keys():
        quantity_dict[name] = fid.variables[name][:]



    # Convert quantity expression to quantities found in sww file
    q = apply_expression_to_dictionary(quantity, quantity_dict)



    if len(q.shape) == 2:
        #q has a time component and needs to be reduced along
        #the temporal dimension
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    #Post condition: Now q has dimension: number_of_points
    assert len(q.shape) == 1
    assert q.shape[0] == number_of_points


    if verbose:
        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))


    #Create grid and update xll/yll corner and x,y

    #Relative extent
    if easting_min is None:
        xmin = min(x)
    else:
        xmin = easting_min - xllcorner

    if easting_max is None:
        xmax = max(x)
    else:
        xmax = easting_max - xllcorner

    if northing_min is None:
        ymin = min(y)
    else:
        ymin = northing_min - yllcorner

    if northing_max is None:
        ymax = max(y)
    else:
        ymax = northing_max - yllcorner



    if verbose: print 'Creating grid'
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1


    #New absolute reference and coordinates
    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2



    grid_points = zeros ( (ncols*nrows, 2), Float )


    for i in xrange(nrows):
        if format.lower() == 'asc':
            yg = i*cellsize
        else:
            #this will flip the order of the y values for ers
            yg = (nrows-i)*cellsize

        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

            grid_points[k,0] = xg
            grid_points[k,1] = yg

    #Interpolate
    #from least_squares import Interpolation
    from fit_interpolate.interpolate import Interpolate


    interp = Interpolate(vertex_points, volumes, verbose = verbose)



    #Interpolate using quantity values
    if verbose: print 'Interpolating'
    grid_values = interp.interpolate(q, grid_points).flat


    if verbose:
        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
                                                      max(grid_values))

    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
    P = interp.mesh.get_boundary_polygon()
    outside_indices = outside_polygon(grid_points, P, closed=True)

    for i in outside_indices:
        grid_values[i] = NODATA_value




    if format.lower() == 'ers':
        # setup ERS header information
        grid_values = reshape(grid_values,(nrows, ncols))
        header = {}
        header['datum'] = '"' + datum + '"'
        # FIXME The use of hardwired UTM and zone number needs to be made optional
        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
        header['projection'] = '"UTM-' + str(zone) + '"'
        header['coordinatetype'] = 'EN'
        if header['coordinatetype'] == 'LL':
            header['longitude'] = str(newxllcorner)
            header['latitude'] = str(newyllcorner)
        elif header['coordinatetype'] == 'EN':
            header['eastings'] = str(newxllcorner)
            header['northings'] = str(newyllcorner)
        header['nullcellvalue'] = str(NODATA_value)
        header['xdimension'] = str(cellsize)
        header['ydimension'] = str(cellsize)
        header['value'] = '"' + quantity + '"'
        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test


        #Write
        if verbose: print 'Writing %s' %demfile
        import ermapper_grids
        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)

        fid.close()
    else:
        #Write to Ascii format

        #Write prj file
        prjfile = basename_out + '.prj'

        if verbose: print 'Writing %s' %prjfile
        prjid = open(prjfile, 'w')
        prjid.write('Projection    %s\n' %'UTM')
        prjid.write('Zone          %d\n' %zone)
        prjid.write('Datum         %s\n' %datum)
        prjid.write('Zunits        NO\n')
        prjid.write('Units         METERS\n')
        prjid.write('Spheroid      %s\n' %datum)
        prjid.write('Xshift        %d\n' %false_easting)
        prjid.write('Yshift        %d\n' %false_northing)
        prjid.write('Parameters\n')
        prjid.close()



        if verbose: print 'Writing %s' %demfile

        ascid = open(demfile, 'w')

        ascid.write('ncols         %d\n' %ncols)
        ascid.write('nrows         %d\n' %nrows)
        ascid.write('xllcorner     %d\n' %newxllcorner)
        ascid.write('yllcorner     %d\n' %newyllcorner)
        ascid.write('cellsize      %f\n' %cellsize)
        ascid.write('NODATA_value  %d\n' %NODATA_value)


        #Get bounding polygon from mesh
        #P = interp.mesh.get_boundary_polygon()
        #inside_indices = inside_polygon(grid_points, P)

        for i in range(nrows):
            if verbose and i%((nrows+10)/10)==0:
                print 'Doing row %d of %d' %(i, nrows)

            base_index = (nrows-i-1)*ncols

            slice = grid_values[base_index:base_index+ncols]
            s = array2string(slice, max_line_width=sys.maxint)
            ascid.write(s[1:-1] + '\n')


            #print
1865            #for j in range(ncols):
1866            #    index = base_index+j#
1867            #    print grid_values[index],
1868            #    ascid.write('%f ' %grid_values[index])
1869            #ascid.write('\n')
1870
1871        #Close
1872        ascid.close()
1873        fid.close()
1874
1875#Backwards compatibility
1876def sww2asc(basename_in, basename_out = None,
1877            quantity = None,
1878            timestep = None,
1879            reduction = None,
1880            cellsize = 10,
1881            verbose = False,
1882            origin = None):
1883    print 'sww2asc will soon be obsoleted - please use sww2dem'
1884    sww2dem(basename_in,
1885            basename_out = basename_out,
1886            quantity = quantity,
1887            timestep = timestep,
1888            reduction = reduction,
1889            cellsize = cellsize,
1890            verbose = verbose,
1891            origin = origin,
            datum = 'WGS84',
            format = 'asc')
1894
1895def sww2ers(basename_in, basename_out = None,
1896            quantity = None,
1897            timestep = None,
1898            reduction = None,
1899            cellsize = 10,
1900            verbose = False,
1901            origin = None,
1902            datum = 'WGS84'):
1903    print 'sww2ers will soon be obsoleted - please use sww2dem'
1904    sww2dem(basename_in,
1905            basename_out = basename_out,
1906            quantity = quantity,
1907            timestep = timestep,
1908            reduction = reduction,
1909            cellsize = cellsize,
1910            verbose = verbose,
1911            origin = origin,
1912            datum = datum,
1913            format = 'ers')
1914################################# END COMPATIBILITY ##############
1915
1916
1917
1918def sww2pts(basename_in, basename_out=None,
1919            data_points=None,
1920            quantity=None,
1921            timestep=None,
1922            reduction=None,
1923            NODATA_value=-9999,
1924            verbose=False,
1925            origin=None):
1926            #datum = 'WGS84')
1927
1928
1929    """Read SWW file and convert to interpolated values at selected points
1930
    The parameter 'quantity' must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'elevation'.
1934
    If timestep (an index) is given, output the quantity at that timestep.

    If reduction is given, use it to reduce the quantity over all timesteps.

    data_points (Nx2 array) gives the locations of points where the
    quantity is to be computed.
1940   
1941    """
1942
1943    import sys
1944    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1945    from Numeric import array2string
1946
1947    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1948    from util import apply_expression_to_dictionary
1949
1950    from geospatial_data import Geospatial_data
1951
1952    if quantity is None:
1953        quantity = 'elevation'
1954
1955    if reduction is None:
1956        reduction = max
1957
1958    if basename_out is None:
1959        basename_out = basename_in + '_%s' %quantity
1960
1961    swwfile = basename_in + '.sww'
1962    ptsfile = basename_out + '.pts'
1963
1964    # Read sww file
1965    if verbose: print 'Reading from %s' %swwfile
1966    from Scientific.IO.NetCDF import NetCDFFile
1967    fid = NetCDFFile(swwfile)
1968
1969    # Get extent and reference
1970    x = fid.variables['x'][:]
1971    y = fid.variables['y'][:]
1972    volumes = fid.variables['volumes'][:]
1973
1974    number_of_timesteps = fid.dimensions['number_of_timesteps']
1975    number_of_points = fid.dimensions['number_of_points']
1976    if origin is None:
1977
1978        # Get geo_reference
1979        # sww files don't have to have a geo_ref
1980        try:
1981            geo_reference = Geo_reference(NetCDFObject=fid)
1982        except AttributeError, e:
1983            geo_reference = Geo_reference() #Default georef object
1984
1985        xllcorner = geo_reference.get_xllcorner()
1986        yllcorner = geo_reference.get_yllcorner()
1987        zone = geo_reference.get_zone()
1988    else:
1989        zone = origin[0]
1990        xllcorner = origin[1]
1991        yllcorner = origin[2]
1992
1993
1994
1995    # FIXME: Refactor using code from file_function.statistics
1996    # Something like print swwstats(swwname)
1997    if verbose:
1998        x = fid.variables['x'][:]
1999        y = fid.variables['y'][:]
2000        times = fid.variables['time'][:]
2001        print '------------------------------------------------'
2002        print 'Statistics of SWW file:'
2003        print '  Name: %s' %swwfile
2004        print '  Reference:'
2005        print '    Lower left corner: [%f, %f]'\
2006              %(xllcorner, yllcorner)
2007        print '    Start time: %f' %fid.starttime[0]
2008        print '  Extent:'
2009        print '    x [m] in [%f, %f], len(x) == %d'\
2010              %(min(x.flat), max(x.flat), len(x.flat))
2011        print '    y [m] in [%f, %f], len(y) == %d'\
2012              %(min(y.flat), max(y.flat), len(y.flat))
2013        print '    t [s] in [%f, %f], len(t) == %d'\
2014              %(min(times), max(times), len(times))
2015        print '  Quantities [SI units]:'
2016        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2017            q = fid.variables[name][:].flat
2018            print '    %s in [%f, %f]' %(name, min(q), max(q))
2019
2020
2021
2022    # Get quantity and reduce if applicable
2023    if verbose: print 'Processing quantity %s' %quantity
2024
2025    # Turn NetCDF objects into Numeric arrays
2026    quantity_dict = {}
2027    for name in fid.variables.keys():
2028        quantity_dict[name] = fid.variables[name][:]
2029
2030
2031
2032    # Convert quantity expression to quantities found in sww file   
2033    q = apply_expression_to_dictionary(quantity, quantity_dict)
2034
2035
2036
2037    if len(q.shape) == 2:
2038        # q has a time component and needs to be reduced along
2039        # the temporal dimension
2040        if verbose: print 'Reducing quantity %s' %quantity
2041        q_reduced = zeros( number_of_points, Float )
2042
2043        for k in range(number_of_points):
2044            q_reduced[k] = reduction( q[:,k] )
2045
2046        q = q_reduced
2047
2048    # Post condition: Now q has dimension: number_of_points
2049    assert len(q.shape) == 1
2050    assert q.shape[0] == number_of_points
2051
2052
2053    if verbose:
2054        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2055
2056
2057    # Create grid and update xll/yll corner and x,y
2058    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2059    assert len(vertex_points.shape) == 2
2060
2061    # Interpolate
2062    from fit_interpolate.interpolate import Interpolate
2063    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2064
2065    # Interpolate using quantity values
2066    if verbose: print 'Interpolating'
2067    interpolated_values = interp.interpolate(q, data_points).flat
2068
2069
2070    if verbose:
2071        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2072                                                      max(interpolated_values))
2073
2074    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2075    P = interp.mesh.get_boundary_polygon()
2076    outside_indices = outside_polygon(data_points, P, closed=True)
2077
2078    for i in outside_indices:
2079        interpolated_values[i] = NODATA_value
2080
2081
2082    # Store results   
2083    G = Geospatial_data(data_points=data_points,
2084                        attributes=interpolated_values)
2085
2086    G.export_points_file(ptsfile, absolute = True)
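

# Example (sketch only): typical use of sww2pts with a hypothetical model
# output 'myrun.sww' and two query points given in absolute UTM coordinates.
# Wrapped in a function so importing this module stays side effect free.
def _example_sww2pts():
    from Numeric import array, Float

    points = array([[308000.0, 6189000.0],
                    [308500.0, 6189500.0]], Float)

    # Interpolate the maximal stage over all timesteps onto the points
    # and store the result in 'myrun_stage.pts'
    sww2pts('myrun',
            data_points=points,
            quantity='stage',
            reduction=max,
            verbose=True)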
2087
2088
2089
2090def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2091                                  use_cache = False,
2092                                  verbose = False):
2093    """Read Digitial Elevation model from the following ASCII format (.asc)
2094
2095    Example:
2096
2097    ncols         3121
2098    nrows         1800
2099    xllcorner     722000
2100    yllcorner     5893000
2101    cellsize      25
2102    NODATA_value  -9999
2103    138.3698 137.4194 136.5062 135.5558 ..........
2104
2105    Convert basename_in + '.asc' to NetCDF format (.dem)
2106    mimicking the ASCII format closely.
2107
2108
2109    An accompanying file with same basename_in but extension .prj must exist
2110    and is used to fix the UTM zone, datum, false northings and eastings.
2111
2112    The prj format is assumed to be as
2113
2114    Projection    UTM
2115    Zone          56
2116    Datum         WGS84
2117    Zunits        NO
2118    Units         METERS
2119    Spheroid      WGS84
2120    Xshift        0.0000000000
2121    Yshift        10000000.0000000000
2122    Parameters
2123    """
2124
2125
2126
2127    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2128
2129    if use_cache is True:
2130        from caching import cache
2131        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2132                       dependencies = [basename_in + '.asc'],
2133                       verbose = verbose)
2134
2135    else:
2136        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2137
2138    return result
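

# Example (sketch only): convert a hypothetical 'topography.asc' (with a
# matching 'topography.prj') to 'topography.dem', caching the result so
# repeated calls are fast while the input file is unchanged.
def _example_convert_dem():
    convert_dem_from_ascii2netcdf('topography',
                                  use_cache=True,
                                  verbose=True)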
2139
2140
2141
2142
2143
2144
2145def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2146                                  verbose = False):
2147    """Read Digitial Elevation model from the following ASCII format (.asc)
2148
2149    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2150    """
2151
2152    import os
2153    from Scientific.IO.NetCDF import NetCDFFile
2154    from Numeric import Float, array
2155
2156    #root, ext = os.path.splitext(basename_in)
2157    root = basename_in
2158
2159    ###########################################
2160    # Read Meta data
2161    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2162    metadatafile = open(root + '.prj')
2163    metalines = metadatafile.readlines()
2164    metadatafile.close()
2165
2166    L = metalines[0].strip().split()
2167    assert L[0].strip().lower() == 'projection'
2168    projection = L[1].strip()                   #TEXT
2169
2170    L = metalines[1].strip().split()
2171    assert L[0].strip().lower() == 'zone'
2172    zone = int(L[1].strip())
2173
2174    L = metalines[2].strip().split()
2175    assert L[0].strip().lower() == 'datum'
2176    datum = L[1].strip()                        #TEXT
2177
2178    L = metalines[3].strip().split()
2179    assert L[0].strip().lower() == 'zunits'     #IGNORE
2180    zunits = L[1].strip()                       #TEXT
2181
2182    L = metalines[4].strip().split()
2183    assert L[0].strip().lower() == 'units'
2184    units = L[1].strip()                        #TEXT
2185
2186    L = metalines[5].strip().split()
2187    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2188    spheroid = L[1].strip()                     #TEXT
2189
2190    L = metalines[6].strip().split()
2191    assert L[0].strip().lower() == 'xshift'
2192    false_easting = float(L[1].strip())
2193
2194    L = metalines[7].strip().split()
2195    assert L[0].strip().lower() == 'yshift'
2196    false_northing = float(L[1].strip())
2197
2198    #print false_easting, false_northing, zone, datum
2199
2200
2201    ###########################################
2202    #Read DEM data
2203
2204    datafile = open(basename_in + '.asc')
2205
2206    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2207    lines = datafile.readlines()
2208    datafile.close()
2209
    if verbose: print 'Got %d lines' %len(lines)
2211
2212    ncols = int(lines[0].split()[1].strip())
2213    nrows = int(lines[1].split()[1].strip())
2214    xllcorner = float(lines[2].split()[1].strip())
2215    yllcorner = float(lines[3].split()[1].strip())
2216    cellsize = float(lines[4].split()[1].strip())
2217    NODATA_value = int(lines[5].split()[1].strip())
2218
2219    assert len(lines) == nrows + 6
2220
2221
2222    ##########################################
2223
2224
2225    if basename_out == None:
2226        netcdfname = root + '.dem'
2227    else:
2228        netcdfname = basename_out + '.dem'
2229
2230    if verbose: print 'Store to NetCDF file %s' %netcdfname
2231    # NetCDF file definition
2232    fid = NetCDFFile(netcdfname, 'w')
2233
2234    #Create new file
2235    fid.institution = 'Geoscience Australia'
2236    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2237                      'of spatial point data'
2238
2239    fid.ncols = ncols
2240    fid.nrows = nrows
2241    fid.xllcorner = xllcorner
2242    fid.yllcorner = yllcorner
2243    fid.cellsize = cellsize
2244    fid.NODATA_value = NODATA_value
2245
2246    fid.zone = zone
2247    fid.false_easting = false_easting
2248    fid.false_northing = false_northing
2249    fid.projection = projection
2250    fid.datum = datum
2251    fid.units = units
2252
2253
2254    # dimension definitions
2255    fid.createDimension('number_of_rows', nrows)
2256    fid.createDimension('number_of_columns', ncols)
2257
2258    # variable definitions
2259    fid.createVariable('elevation', Float, ('number_of_rows',
2260                                            'number_of_columns'))
2261
2262    # Get handles to the variables
2263    elevation = fid.variables['elevation']
2264
2265    #Store data
2266    n = len(lines[6:])
2267    for i, line in enumerate(lines[6:]):
2268        fields = line.split()
2269        if verbose and i%((n+10)/10)==0:
2270            print 'Processing row %d of %d' %(i, nrows)
2271
2272        elevation[i, :] = array([float(x) for x in fields])
2273
2274    fid.close()
2275
2276
2277
2278
2279
2280def ferret2sww(basename_in, basename_out = None,
2281               verbose = False,
2282               minlat = None, maxlat = None,
2283               minlon = None, maxlon = None,
2284               mint = None, maxt = None, mean_stage = 0,
2285               origin = None, zscale = 1,
2286               fail_on_NaN = True,
2287               NaN_filler = 0,
2288               elevation = None,
2289               inverted_bathymetry = False
2290               ): #FIXME: Bathymetry should be obtained
2291                                  #from MOST somehow.
2292                                  #Alternatively from elsewhere
2293                                  #or, as a last resort,
2294                                  #specified here.
2295                                  #The value of -100 will work
2296                                  #for the Wollongong tsunami
2297                                  #scenario but is very hacky
2298    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2299    sww format native to pyvolution.
2300
2301    Specify only basename_in and read files of the form
2302    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2303    relative height, x-velocity and y-velocity, respectively.
2304
2305    Also convert latitude and longitude to UTM. All coordinates are
2306    assumed to be given in the GDA94 datum.
2307
    min's and max's: If omitted, the full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
2311
2312    origin is a 3-tuple with geo referenced
2313    UTM coordinates (zone, easting, northing)
2314
2315    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2316    which means that longitude is the fastest
2317    varying dimension (row major order, so to speak)
2318
2319    ferret2sww uses grid points as vertices in a triangular grid
2320    counting vertices from lower left corner upwards, then right
2321    """
2322
2323    import os
2324    from Scientific.IO.NetCDF import NetCDFFile
2325    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2326    from Numeric import allclose, around
2327
2328    precision = Float
2329
2330    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2331
2332    if minlat != None:
2333        assert -90 < minlat < 90 , msg
2334    if maxlat != None:
2335        assert -90 < maxlat < 90 , msg
2336    if minlon != None:
2337        assert -180 < minlon < 180 , msg
2338    if maxlon != None:
2339        assert -180 < maxlon < 180 , msg
2340
2341
2342    #Get NetCDF data
2343    if verbose: print 'Reading files %s_*.nc' %basename_in
2344    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2345    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2346    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2347    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2348
2349    if basename_out is None:
2350        swwname = basename_in + '.sww'
2351    else:
2352        swwname = basename_out + '.sww'
2353
    # Get dimensions of file_h
2355    for dimension in file_h.dimensions.keys():
2356        if dimension[:3] == 'LON':
2357            dim_h_longitude = dimension
2358        if dimension[:3] == 'LAT':
2359            dim_h_latitude = dimension
2360        if dimension[:4] == 'TIME':
2361            dim_h_time = dimension
2362
2363#    print 'long:', dim_h_longitude
2364#    print 'lats:', dim_h_latitude
2365#    print 'times:', dim_h_time
2366
2367    times = file_h.variables[dim_h_time]
2368    latitudes = file_h.variables[dim_h_latitude]
2369    longitudes = file_h.variables[dim_h_longitude]
2370   
    # Get dimensions of file_e
2372    for dimension in file_e.dimensions.keys():
2373        if dimension[:3] == 'LON':
2374            dim_e_longitude = dimension
2375        if dimension[:3] == 'LAT':
2376            dim_e_latitude = dimension
2377        if dimension[:4] == 'TIME':
2378            dim_e_time = dimension
2379
    # Get dimensions of file_u
2381    for dimension in file_u.dimensions.keys():
2382        if dimension[:3] == 'LON':
2383            dim_u_longitude = dimension
2384        if dimension[:3] == 'LAT':
2385            dim_u_latitude = dimension
2386        if dimension[:4] == 'TIME':
2387            dim_u_time = dimension
2388           
    # Get dimensions of file_v
2390    for dimension in file_v.dimensions.keys():
2391        if dimension[:3] == 'LON':
2392            dim_v_longitude = dimension
2393        if dimension[:3] == 'LAT':
2394            dim_v_latitude = dimension
2395        if dimension[:4] == 'TIME':
2396            dim_v_time = dimension
2397
2398
2399    #Precision used by most for lat/lon is 4 or 5 decimals
2400    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2401    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2402
2403    #Check that files are compatible
2404    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2405    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2406    assert allclose(latitudes, e_lat)
2407
2408    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2409    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2410    assert allclose(longitudes, e_lon)
2411
2412    if mint == None:
2413        jmin = 0
2414    else:
2415        jmin = searchsorted(times, mint)
2416
2417    if maxt == None:
2418        jmax=len(times)
2419    else:
2420        jmax = searchsorted(times, maxt)
2421
2422    if minlat == None:
2423        kmin=0
2424    else:
2425        kmin = searchsorted(latitudes, minlat)
2426
2427    if maxlat == None:
2428        kmax = len(latitudes)
2429    else:
2430        kmax = searchsorted(latitudes, maxlat)
2431
2432    if minlon == None:
2433        lmin=0
2434    else:
2435        lmin = searchsorted(longitudes, minlon)
2436
2437    if maxlon == None:
2438        lmax = len(longitudes)
2439    else:
2440        lmax = searchsorted(longitudes, maxlon)
2441
2442#    print' j', jmin, jmax
2443    times = times[jmin:jmax]
2444    latitudes = latitudes[kmin:kmax]
2445    longitudes = longitudes[lmin:lmax]
2446
2447
2448    if verbose: print 'cropping'
2449    zname = 'ELEVATION'
2450
2451    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2452    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2453    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2454    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2455
2456    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2457    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2458    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2459    #        from Numeric import asarray
2460    #        elevations=elevations.tolist()
2461    #        elevations.reverse()
2462    #        elevations=asarray(elevations)
2463    #    else:
2464    #        from Numeric import asarray
2465    #        elevations=elevations.tolist()
2466    #        elevations.reverse()
2467    #        elevations=asarray(elevations)
2468    #        'print hmmm'
2469
2470
2471
2472    #Get missing values
2473    nan_ha = file_h.variables['HA'].missing_value[0]
2474    nan_ua = file_u.variables['UA'].missing_value[0]
2475    nan_va = file_v.variables['VA'].missing_value[0]
2476    if hasattr(file_e.variables[zname],'missing_value'):
2477        nan_e  = file_e.variables[zname].missing_value[0]
2478    else:
2479        nan_e = None
2480
2481    #Cleanup
2482    from Numeric import sometrue
2483
2484    missing = (amplitudes == nan_ha)
2485    if sometrue (missing):
2486        if fail_on_NaN:
2487            msg = 'NetCDFFile %s contains missing values'\
2488                  %(basename_in+'_ha.nc')
2489            raise DataMissingValuesError, msg
2490        else:
2491            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2492
2493    missing = (uspeed == nan_ua)
2494    if sometrue (missing):
2495        if fail_on_NaN:
2496            msg = 'NetCDFFile %s contains missing values'\
2497                  %(basename_in+'_ua.nc')
2498            raise DataMissingValuesError, msg
2499        else:
2500            uspeed = uspeed*(missing==0) + missing*NaN_filler
2501
2502    missing = (vspeed == nan_va)
2503    if sometrue (missing):
2504        if fail_on_NaN:
2505            msg = 'NetCDFFile %s contains missing values'\
2506                  %(basename_in+'_va.nc')
2507            raise DataMissingValuesError, msg
2508        else:
2509            vspeed = vspeed*(missing==0) + missing*NaN_filler
2510
2511
2512    missing = (elevations == nan_e)
2513    if sometrue (missing):
2514        if fail_on_NaN:
2515            msg = 'NetCDFFile %s contains missing values'\
2516                  %(basename_in+'_e.nc')
2517            raise DataMissingValuesError, msg
2518        else:
2519            elevations = elevations*(missing==0) + missing*NaN_filler
2520
2521    #######
2522
2523
2524
2525    number_of_times = times.shape[0]
2526    number_of_latitudes = latitudes.shape[0]
2527    number_of_longitudes = longitudes.shape[0]
2528
2529    assert amplitudes.shape[0] == number_of_times
2530    assert amplitudes.shape[1] == number_of_latitudes
2531    assert amplitudes.shape[2] == number_of_longitudes
2532
2533    if verbose:
2534        print '------------------------------------------------'
2535        print 'Statistics:'
2536        print '  Extent (lat/lon):'
2537        print '    lat in [%f, %f], len(lat) == %d'\
2538              %(min(latitudes.flat), max(latitudes.flat),
2539                len(latitudes.flat))
2540        print '    lon in [%f, %f], len(lon) == %d'\
2541              %(min(longitudes.flat), max(longitudes.flat),
2542                len(longitudes.flat))
2543        print '    t in [%f, %f], len(t) == %d'\
2544              %(min(times.flat), max(times.flat), len(times.flat))
2545
2546        q = amplitudes.flat
2547        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2549
2550        q = uspeed.flat
2551        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2553
2554        q = vspeed.flat
2555        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2557
2558        q = elevations.flat
2559        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2561
2562
2563    #print number_of_latitudes, number_of_longitudes
2564    number_of_points = number_of_latitudes*number_of_longitudes
2565    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2566
2567
2568    file_h.close()
2569    file_u.close()
2570    file_v.close()
2571    file_e.close()
2572
2573
2574    # NetCDF file definition
2575    outfile = NetCDFFile(swwname, 'w')
2576
2577    #Create new file
2578    outfile.institution = 'Geoscience Australia'
2579    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2580                          %(basename_in + '_ha.nc',
2581                            basename_in + '_ua.nc',
2582                            basename_in + '_va.nc',
2583                            basename_in + '_e.nc')
2584
2585
2586    #For sww compatibility
2587    outfile.smoothing = 'Yes'
2588    outfile.order = 1
2589
2590    #Start time in seconds since the epoch (midnight 1/1/1970)
2591    outfile.starttime = starttime = times[0]
2592    times = times - starttime  #Store relative times
2593
2594    # dimension definitions
2595    outfile.createDimension('number_of_volumes', number_of_volumes)
2596
2597    outfile.createDimension('number_of_vertices', 3)
2598    outfile.createDimension('number_of_points', number_of_points)
2599
2600
2602    outfile.createDimension('number_of_timesteps', len(times))
2603
2604    # variable definitions
2605    outfile.createVariable('x', precision, ('number_of_points',))
2606    outfile.createVariable('y', precision, ('number_of_points',))
2607    outfile.createVariable('elevation', precision, ('number_of_points',))
2608
2609    #FIXME: Backwards compatibility
2610    outfile.createVariable('z', precision, ('number_of_points',))
2611    #################################
2612
2613    outfile.createVariable('volumes', Int, ('number_of_volumes',
2614                                            'number_of_vertices'))
2615
2616    outfile.createVariable('time', precision,
2617                           ('number_of_timesteps',))
2618
2619    outfile.createVariable('stage', precision,
2620                           ('number_of_timesteps',
2621                            'number_of_points'))
2622
2623    outfile.createVariable('xmomentum', precision,
2624                           ('number_of_timesteps',
2625                            'number_of_points'))
2626
2627    outfile.createVariable('ymomentum', precision,
2628                           ('number_of_timesteps',
2629                            'number_of_points'))
2630
2631
2632    #Store
2633    from coordinate_transforms.redfearn import redfearn
2634    x = zeros(number_of_points, Float)  #Easting
2635    y = zeros(number_of_points, Float)  #Northing
2636
2637
2638    if verbose: print 'Making triangular grid'
2639    #Check zone boundaries
2640    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2641
2642    vertices = {}
2643    i = 0
2644    for k, lat in enumerate(latitudes):       #Y direction
2645        for l, lon in enumerate(longitudes):  #X direction
2646
2647            vertices[l,k] = i
2648
2649            zone, easting, northing = redfearn(lat,lon)
2650
            msg = 'Zone boundary crossed at longitude = %f' %lon
2652            #assert zone == refzone, msg
2653            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2654            x[i] = easting
2655            y[i] = northing
2656            i += 1
2657
2658
2659    #Construct 2 triangles per 'rectangular' element
2660    volumes = []
2661    for l in range(number_of_longitudes-1):    #X direction
2662        for k in range(number_of_latitudes-1): #Y direction
2663            v1 = vertices[l,k+1]
2664            v2 = vertices[l,k]
2665            v3 = vertices[l+1,k+1]
2666            v4 = vertices[l+1,k]
2667
2668            volumes.append([v1,v2,v3]) #Upper element
2669            volumes.append([v4,v3,v2]) #Lower element
2670
2671    volumes = array(volumes)
2672
2673    if origin == None:
2674        zone = refzone
2675        xllcorner = min(x)
2676        yllcorner = min(y)
2677    else:
2678        zone = origin[0]
2679        xllcorner = origin[1]
2680        yllcorner = origin[2]
2681
2682
2683    outfile.xllcorner = xllcorner
2684    outfile.yllcorner = yllcorner
2685    outfile.zone = zone
2686
2687
2688    if elevation is not None:
2689        z = elevation
2690    else:
2691        if inverted_bathymetry:
2692            z = -1*elevations
2693        else:
2694            z = elevations
2695    #FIXME: z should be obtained from MOST and passed in here
2696
2697    from Numeric import resize
2698    z = resize(z,outfile.variables['z'][:].shape)
2699    outfile.variables['x'][:] = x - xllcorner
2700    outfile.variables['y'][:] = y - yllcorner
2701    outfile.variables['z'][:] = z             #FIXME HACK
2702    outfile.variables['elevation'][:] = z
2703    outfile.variables['time'][:] = times   #Store time relative
2704    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2705
2706
2707
2708    #Time stepping
2709    stage = outfile.variables['stage']
2710    xmomentum = outfile.variables['xmomentum']
2711    ymomentum = outfile.variables['ymomentum']
2712
2713    if verbose: print 'Converting quantities'
2714    n = len(times)
2715    for j in range(n):
2716        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2717        i = 0
2718        for k in range(number_of_latitudes):      #Y direction
2719            for l in range(number_of_longitudes): #X direction
2720                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2721                stage[j,i] = w
2722                h = w - z[i]
2723                xmomentum[j,i] = uspeed[j,k,l]/100*h
2724                ymomentum[j,i] = vspeed[j,k,l]/100*h
2725                i += 1
2726
2727    #outfile.close()
2728
2729    #FIXME: Refactor using code from file_function.statistics
2730    #Something like print swwstats(swwname)
2731    if verbose:
2732        x = outfile.variables['x'][:]
2733        y = outfile.variables['y'][:]
2734        times = outfile.variables['time'][:]
2735        print '------------------------------------------------'
2736        print 'Statistics of output file:'
2737        print '  Name: %s' %swwname
2738        print '  Reference:'
2739        print '    Lower left corner: [%f, %f]'\
2740              %(xllcorner, yllcorner)
2741        print '    Start time: %f' %starttime
2742        print '  Extent:'
2743        print '    x [m] in [%f, %f], len(x) == %d'\
2744              %(min(x.flat), max(x.flat), len(x.flat))
2745        print '    y [m] in [%f, %f], len(y) == %d'\
2746              %(min(y.flat), max(y.flat), len(y.flat))
2747        print '    t [s] in [%f, %f], len(t) == %d'\
2748              %(min(times), max(times), len(times))
2749        print '  Quantities [SI units]:'
2750        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2751            q = outfile.variables[name][:].flat
2752            print '    %s in [%f, %f]' %(name, min(q), max(q))
2753
2754
2755
2756    outfile.close()
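

# Example (sketch only): convert hypothetical MOST output files
# 'mostrun_ha.nc', 'mostrun_ua.nc', 'mostrun_va.nc' and 'mostrun_e.nc'
# to 'mostrun.sww', cropping to a lat/lon window and lifting the mean
# sea level by 0.5 m.
def _example_ferret2sww():
    ferret2sww('mostrun',
               minlat=-34.5, maxlat=-34.0,
               minlon=150.5, maxlon=151.0,
               mean_stage=0.5,
               inverted_bathymetry=True,
               verbose=True)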
2757
2758
2759
2760
2761
2762def timefile2netcdf(filename, quantity_names = None):
2763    """Template for converting typical text files with time series to
2764    NetCDF tms file.
2765
2766
    The file format is assumed to be two fields separated by a comma:
2768
2769        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
2770
    E.g.
2772
2773      31/08/04 00:00:00, 1.328223 0 0
2774      31/08/04 00:15:00, 1.292912 0 0
2775
    will provide a time dependent function f(t) with three attributes.

    filename is assumed to be the rootname with extensions .txt and .tms
2779    """
2780
2781    import time, calendar
2782    from Numeric import array
2783    from config import time_format
2784    from utilities.numerical_tools import ensure_numeric
2785
2786
2787
2788    fid = open(filename + '.txt')
2789    line = fid.readline()
2790    fid.close()
2791
2792    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
2794    assert len(fields) == 2, msg
2795
2796    try:
2797        starttime = calendar.timegm(time.strptime(fields[0], time_format))
2798    except ValueError:
2799        msg = 'First field in file %s must be' %filename
2800        msg += ' date-time with format %s.\n' %time_format
2801        msg += 'I got %s instead.' %fields[0]
2802        raise DataTimeError, msg
2803
2804
2805    #Split values
2806    values = []
2807    for value in fields[1].split():
2808        values.append(float(value))
2809
2810    q = ensure_numeric(values)
2811
2812    msg = 'ERROR: File must contain at least one independent value'
2813    assert len(q.shape) == 1, msg
2814
2815
2816
2817    #Read times proper
2818    from Numeric import zeros, Float, alltrue
2819    from config import time_format
2820    import time, calendar
2821
2822    fid = open(filename + '.txt')
2823    lines = fid.readlines()
2824    fid.close()
2825
2826    N = len(lines)
2827    d = len(q)
2828
2829    T = zeros(N, Float)       #Time
2830    Q = zeros((N, d), Float)  #Values
2831
2832    for i, line in enumerate(lines):
2833        fields = line.split(',')
2834        realtime = calendar.timegm(time.strptime(fields[0], time_format))
2835
2836        T[i] = realtime - starttime
2837
2838        for j, value in enumerate(fields[1].split()):
2839            Q[i, j] = float(value)
2840
    msg = 'File %s must list time as a monotonically ' %filename
    msg += 'increasing sequence'
2843    assert alltrue( T[1:] - T[:-1] > 0 ), msg
2844
2845
2846    #Create NetCDF file
2847    from Scientific.IO.NetCDF import NetCDFFile
2848
2849    fid = NetCDFFile(filename + '.tms', 'w')
2850
2851
2852    fid.institution = 'Geoscience Australia'
2853    fid.description = 'Time series'
2854
2855
2856    #Reference point
2857    #Start time in seconds since the epoch (midnight 1/1/1970)
2858    #FIXME: Use Georef
2859    fid.starttime = starttime
2860
2861
2862    # dimension definitions
2863    #fid.createDimension('number_of_volumes', self.number_of_volumes)
2864    #fid.createDimension('number_of_vertices', 3)
2865
2866
2867    fid.createDimension('number_of_timesteps', len(T))
2868
2869    fid.createVariable('time', Float, ('number_of_timesteps',))
2870
2871    fid.variables['time'][:] = T
2872
2873    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            name = 'Attribute%d' %i
2878
2879        fid.createVariable(name, Float, ('number_of_timesteps',))
2880        fid.variables[name][:] = Q[:,i]
2881
2882    fid.close()
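

# Example (sketch only): create a small time series file in the expected
# format (dates as in the docstring example, which must match
# config.time_format) and convert it to 'gauge.tms'.
def _example_timefile2netcdf():
    fid = open('gauge.txt', 'w')
    fid.write('31/08/04 00:00:00, 1.328223 0 0\n')
    fid.write('31/08/04 00:15:00, 1.292912 0 0\n')
    fid.close()

    timefile2netcdf('gauge', quantity_names=['stage',
                                             'xmomentum',
                                             'ymomentum'])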
2883
2884
2885def extent_sww(file_name):
2886    """
2887    Read in an sww file.
2888
    Input:
    file_name - the sww file

    Output:
2893    z - Vector of bed elevation
2894    volumes - Array.  Each row has 3 values, representing
2895    the vertices that define the volume
2896    time - Vector of the times where there is stage information
2897    stage - array with respect to time and vertices (x,y)
2898    """
2899
2900
2901    from Scientific.IO.NetCDF import NetCDFFile
2902
2903    #Check contents
2904    #Get NetCDF
2905    fid = NetCDFFile(file_name, 'r')
2906
2907    # Get the variables
2908    x = fid.variables['x'][:]
2909    y = fid.variables['y'][:]
2910    stage = fid.variables['stage'][:]
2911    #print "stage",stage
2912    #print "stage.shap",stage.shape
2913    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
2914    #print "min(stage)",min(stage)
2915
2916    fid.close()
2917
2918    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
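

# Example (sketch only): report the spatial extent and stage range of a
# hypothetical 'myrun.sww'.
def _example_extent_sww():
    xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('myrun.sww')
    print 'x in [%f, %f], y in [%f, %f]' %(xmin, xmax, ymin, ymax)
    print 'stage in [%f, %f]' %(stage_min, stage_max)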
2919
2920
2921def sww2domain(filename,boundary=None,t=None,\
2922               fail_if_NaN=True,NaN_filler=0\
2923               ,verbose = False,very_verbose = False):
2924    """
2925    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
2926
2927    Boundary is not recommended if domain.smooth is not selected, as it
2928    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
2930    give a different final boundary, or crash.
2931    """
    NaN = 9.969209968386869e+036
    #Initialise NaN to the default NetCDF fill value used for missing data
2934
2935    from Scientific.IO.NetCDF import NetCDFFile
2936    from shallow_water import Domain
2937    from Numeric import asarray, transpose, resize
2938
2939    if verbose: print 'Reading from ', filename
2940    fid = NetCDFFile(filename, 'r')    #Open existing file for read
2941    time = fid.variables['time']       #Timesteps
2942    if t is None:
2943        t = time[-1]
2944    time_interp = get_time_interp(time,t)
2945
2946    # Get the variables as Numeric arrays
2947    x = fid.variables['x'][:]             #x-coordinates of vertices
2948    y = fid.variables['y'][:]             #y-coordinates of vertices
2949    elevation = fid.variables['elevation']     #Elevation
2950    stage = fid.variables['stage']     #Water level
2951    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
2952    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
2953
2954    starttime = fid.starttime[0]
2955    volumes = fid.variables['volumes'][:] #Connectivity
2956    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
2957
2958    conserved_quantities = []
2959    interpolated_quantities = {}
2960    other_quantities = []
2961
2962    # get geo_reference
2963    #sww files don't have to have a geo_ref
2964    try:
2965        geo_reference = Geo_reference(NetCDFObject=fid)
2966    except: #AttributeError, e:
2967        geo_reference = None
2968
2969    if verbose: print '    getting quantities'
2970    for quantity in fid.variables.keys():
2971        dimensions = fid.variables[quantity].dimensions
2972        if 'number_of_timesteps' in dimensions:
2973            conserved_quantities.append(quantity)
2974            interpolated_quantities[quantity]=\
2975                  interpolated_quantity(fid.variables[quantity][:],time_interp)
2976        else: other_quantities.append(quantity)
2977
2978    other_quantities.remove('x')
2979    other_quantities.remove('y')
2980    other_quantities.remove('z')
2981    other_quantities.remove('volumes')
2982
2983    conserved_quantities.remove('time')
2984
2985    if verbose: print '    building domain'
2986    #    From domain.Domain:
2987    #    domain = Domain(coordinates, volumes,\
2988    #                    conserved_quantities = conserved_quantities,\
2989    #                    other_quantities = other_quantities,zone=zone,\
2990    #                    xllcorner=xllcorner, yllcorner=yllcorner)
2991
2992    #   From shallow_water.Domain:
2993    coordinates=coordinates.tolist()
2994    volumes=volumes.tolist()
2995    #FIXME:should this be in mesh?(peter row)
2996    if fid.smoothing == 'Yes': unique = False
2997    else: unique = True
2998    if unique:
2999        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3000
3001
3002    try:
3003        domain = Domain(coordinates, volumes, boundary)
3004    except AssertionError, e:
3005        fid.close()
3006        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3007        raise DataDomainError, msg
3008
3009    if not boundary is None:
3010        domain.boundary = boundary
3011
3012    domain.geo_reference = geo_reference
3013
3014    domain.starttime=float(starttime)+float(t)
3015    domain.time=0.0
3016
3017    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except AttributeError:
            pass #quantity has no missing_value number
3022        X = fid.variables[quantity][:]
3023        if very_verbose:
3024            print '       ',quantity
3025            print '        NaN =',NaN
3026            print '        max(X)'
3027            print '       ',max(X)
3028            print '        max(X)==NaN'
3029            print '       ',max(X)==NaN
3030            print ''
3031        if (max(X)==NaN) or (min(X)==NaN):
3032            if fail_if_NaN:
3033                msg = 'quantity "%s" contains no_data entry'%quantity
3034                raise DataMissingValuesError, msg
3035            else:
                data = (X != NaN)
3037                X = (X*data)+(data==0)*NaN_filler
3038        if unique:
3039            X = resize(X,(len(X)/3,3))
3040        domain.set_quantity(quantity,X)
3041    #
3042    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except AttributeError:
            pass #quantity has no missing_value number
3047        X = interpolated_quantities[quantity]
3048        if very_verbose:
3049            print '       ',quantity
3050            print '        NaN =',NaN
3051            print '        max(X)'
3052            print '       ',max(X)
3053            print '        max(X)==NaN'
3054            print '       ',max(X)==NaN
3055            print ''
3056        if (max(X)==NaN) or (min(X)==NaN):
3057            if fail_if_NaN:
3058                msg = 'quantity "%s" contains no_data entry'%quantity
3059                raise DataMissingValuesError, msg
3060            else:
                data = (X != NaN)
3062                X = (X*data)+(data==0)*NaN_filler
3063        if unique:
3064            X = resize(X,(X.shape[0]/3,3))
3065        domain.set_quantity(quantity,X)
3066
3067    fid.close()
3068    return domain
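

# Example (sketch only): rebuild a domain from a hypothetical 'myrun.sww'
# at t = 3600 s, replacing missing values with zero rather than failing.
# The resulting domain can be used to continue a simulation.
def _example_sww2domain():
    domain = sww2domain('myrun.sww',
                        t=3600,
                        fail_if_NaN=False,
                        NaN_filler=0,
                        verbose=True)
    return domain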
3069
3070def interpolated_quantity(saved_quantity,time_interp):
3071
3072    #given an index and ratio, interpolate quantity with respect to time.
3073    index,ratio = time_interp
3074    Q = saved_quantity
3075    if ratio > 0:
3076        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3077    else:
3078        q = Q[index]
3079    #Return vector of interpolated values
3080    return q
3081
3082def get_time_interp(time,t=None):
3083    #Finds the ratio and index for time interpolation.
3084    #It is borrowed from previous pyvolution code.
3085    if t is None:
3086        t=time[-1]
3087        index = -1
3088        ratio = 0.
3089    else:
3090        T = time
3091        tau = t
3092        index=0
3093        msg = 'Time interval derived from file %s [%s:%s]'\
3094            %('FIXMEfilename', T[0], T[-1])
3095        msg += ' does not match model time: %s' %tau
3096        if tau < time[0]: raise DataTimeError, msg
3097        if tau > time[-1]: raise DataTimeError, msg
3098        while tau > time[index]: index += 1
3099        while tau < time[index]: index -= 1
3100        if tau == time[index]:
3101            #Protect against case where tau == time[-1] (last time)
3102            # - also works in general when tau == time[i]
3103            ratio = 0
3104        else:
3105            #t is now between index and index+1
3106            ratio = (tau - time[index])/(time[index+1] - time[index])
3107    return (index,ratio)
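

# Example (sketch only): with stored times [0, 10, 20] a model time of
# t = 15 falls between indices 1 and 2 with ratio 0.5, so the
# interpolated value is the average of Q[1] and Q[2].
def _example_time_interp():
    from Numeric import array, Float

    time = array([0.0, 10.0, 20.0], Float)
    Q = array([[0.0, 0.0], [2.0, 4.0], [4.0, 8.0]], Float)  #Two vertices

    index, ratio = get_time_interp(time, t=15.0)  #index == 1, ratio == 0.5
    q = interpolated_quantity(Q, (index, ratio))  #q == [3.0, 6.0]
    return q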
3108
3109
3110def weed(coordinates,volumes,boundary = None):
    #Convert Numeric arrays to lists if necessary
    if type(coordinates).__name__ == 'array':
        coordinates = coordinates.tolist()
    if type(volumes).__name__ == 'array':
        volumes = volumes.tolist()
3115
3116    unique = False
3117    point_dict = {}
3118    same_point = {}
3119    for i in range(len(coordinates)):
3120        point = tuple(coordinates[i])
3121        if point_dict.has_key(point):
3122            unique = True
3123            same_point[i]=point
3124            #to change all point i references to point j
3125        else:
3126            point_dict[point]=i
3127            same_point[i]=point
3128
3129    coordinates = []
3130    i = 0
3131    for point in point_dict.keys():
3132        point = tuple(point)
3133        coordinates.append(list(point))
3134        point_dict[point]=i
3135        i+=1
3136
3137
3138    for volume in volumes:
3139        for i in range(len(volume)):
3140            index = volume[i]
3141            if index>-1:
3142                volume[i]=point_dict[same_point[index]]
3143
3144    new_boundary = {}
3145    if not boundary is None:
3146        for segment in boundary.keys():
3147            point0 = point_dict[same_point[segment[0]]]
3148            point1 = point_dict[same_point[segment[1]]]
3149            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')? (peter row)
3152
3153            if new_boundary.has_key((point0,point1)):
3154                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3155                                              #+','+label
3156
3157            elif new_boundary.has_key((point1,point0)):
3158                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3159                                              #+','+label
3160            else: new_boundary[(point0,point1)]=label
3161
3162        boundary = new_boundary
3163
3164    return coordinates,volumes,boundary
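

# Example (sketch only): two triangles sharing an edge, with the shared
# vertices duplicated as they would be in an unsmoothed sww file. weed
# collapses the duplicates and renumbers the volumes accordingly.
def _example_weed():
    coordinates = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0],
                   [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]]  #Points 3, 5 repeat 1, 2
    volumes = [[0, 1, 2], [3, 4, 5]]

    coordinates, volumes, boundary = weed(coordinates, volumes)
    print len(coordinates), 'unique points'  #4 instead of 6
    return coordinates, volumes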
3165
3166
3167def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3168                 verbose=False):
3169    """Read Digitial Elevation model from the following NetCDF format (.dem)
3170
3171    Example:
3172
3173    ncols         3121
3174    nrows         1800
3175    xllcorner     722000
3176    yllcorner     5893000
3177    cellsize      25
3178    NODATA_value  -9999
3179    138.3698 137.4194 136.5062 135.5558 ..........
3180
3181    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3182    """
3183
3184    import os
3185    from Scientific.IO.NetCDF import NetCDFFile
3186    from Numeric import Float, zeros, sum, reshape, equal
3187
3188    root = basename_in
3189    inname = root + '.dem'
3190
3191    #Open existing netcdf file to read
3192    infile = NetCDFFile(inname, 'r')
3193    if verbose: print 'Reading DEM from %s' %inname
3194
3195    #Read metadata
3196    ncols = infile.ncols[0]
3197    nrows = infile.nrows[0]
3198    xllcorner = infile.xllcorner[0]
3199    yllcorner = infile.yllcorner[0]
3200    cellsize = infile.cellsize[0]
3201    NODATA_value = infile.NODATA_value[0]
3202    zone = infile.zone[0]
3203    false_easting = infile.false_easting[0]
3204    false_northing = infile.false_northing[0]
3205    projection = infile.projection
3206    datum = infile.datum
3207    units = infile.units
3208
3209    dem_elevation = infile.variables['elevation']
3210
3211    #Get output file name
3212    if basename_out == None:
3213        outname = root + '_' + repr(cellsize_new) + '.dem'
3214    else:
3215        outname = basename_out + '.dem'
3216
3217    if verbose: print 'Write decimated NetCDF file to %s' %outname
3218
3219    #Determine some dimensions for decimated grid
3220    (nrows_stencil, ncols_stencil) = stencil.shape
3221    x_offset = ncols_stencil / 2
3222    y_offset = nrows_stencil / 2
3223    cellsize_ratio = int(cellsize_new / cellsize)
3224    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3225    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3226
3227    #Open netcdf file for output
3228    outfile = NetCDFFile(outname, 'w')
3229
3230    #Create new file
3231    outfile.institution = 'Geoscience Australia'
3232    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3233                           'of spatial point data'
3234    #Georeferencing
3235    outfile.zone = zone
3236    outfile.projection = projection
3237    outfile.datum = datum
3238    outfile.units = units
3239
3240    outfile.cellsize = cellsize_new
3241    outfile.NODATA_value = NODATA_value
3242    outfile.false_easting = false_easting
3243    outfile.false_northing = false_northing
3244
3245    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3246    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3247    outfile.ncols = ncols_new
3248    outfile.nrows = nrows_new
3249
3250    # dimension definition
3251    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3252
3253    # variable definition
3254    outfile.createVariable('elevation', Float, ('number_of_points',))
3255
3256    # Get handle to the variable
3257    elevation = outfile.variables['elevation']
3258
3259    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3260
3261    #Store data
3262    global_index = 0
3263    for i in range(nrows_new):
3264        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3265        lower_index = global_index
3266        telev =  zeros(ncols_new, Float)
3267        local_index = 0
3268        trow = i * cellsize_ratio
3269
3270        for j in range(ncols_new):
3271            tcol = j * cellsize_ratio
3272            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3273
3274            #if dem contains 1 or more NODATA_values set value in
3275            #decimated dem to NODATA_value, else compute decimated
3276            #value using stencil
3277            if sum(sum(equal(tmp, NODATA_value))) > 0:
3278                telev[local_index] = NODATA_value
3279            else:
3280                telev[local_index] = sum(sum(tmp * stencil))
3281
3282            global_index += 1
3283            local_index += 1
3284
3285        upper_index = global_index
3286
3287        elevation[lower_index:upper_index] = telev
3288
3289    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3290
3291    infile.close()
3292    outfile.close()
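

# Example (sketch only): decimate a hypothetical 'topography.dem' with
# 25 m cells down to 100 m cells, averaging over a 4x4 window. The
# stencil weights must sum to one for an unweighted average.
def _example_decimate_dem():
    from Numeric import ones, Float

    stencil = ones((4, 4), Float) / 16.0
    decimate_dem('topography',
                 stencil=stencil,
                 cellsize_new=100,
                 verbose=True)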
3293
3294
3295
3296
3297def tsh2sww(filename, verbose=False): #test_tsh2sww
3298    """
    Convert a tsh/msh file to sww format to check that it 'looks' good.
3300    """
3301
3302    #FIXME Move to data_manager
3303    from shallow_water import Domain
3304    from pmesh2domain import pmesh_to_domain_instance
3305    import time, os
3306    from data_manager import get_dataobject
3307    from os import sep, path
3308    from utilities.numerical_tools import mean
3309
3310    if verbose == True:print 'Creating domain from', filename
3311    domain = pmesh_to_domain_instance(filename, Domain)
3312    if verbose == True:print "Number of triangles = ", len(domain)
3313
3314    domain.smooth = True
3315    domain.format = 'sww'   #Native netcdf visualisation format
3316    file_path, filename = path.split(filename)
3317    filename, ext = path.splitext(filename)
3318    domain.filename = filename
3319    domain.reduction = mean
3320    if verbose == True:print "file_path",file_path
3321    if file_path == "":file_path = "."
3322    domain.set_datadir(file_path)
3323
3324    if verbose == True:
3325        print "Output written to " + domain.get_datadir() + sep + \
3326              domain.filename + "." + domain.format
3327    sww = get_dataobject(domain)
3328    sww.store_connectivity()
3329    sww.store_timestep('stage')
3330
3331
3332def asc_csiro2sww(bath_dir,
3333                  elevation_dir,
3334                  ucur_dir,
3335                  vcur_dir,
3336                  sww_file,
3337                  minlat = None, maxlat = None,
3338                  minlon = None, maxlon = None,
3339                  zscale=1,
3340                  mean_stage = 0,
3341                  fail_on_NaN = True,
3342                  elevation_NaN_filler = 0,
3343                  bath_prefix='ba',
3344                  elevation_prefix='el',
3345                  verbose=False):
3346    """
3347    Produce an sww boundary file, from esri ascii data from CSIRO.
3348
3349    Also convert latitude and longitude to UTM. All coordinates are
3350    assumed to be given in the GDA94 datum.
3351
3352    assume:
3353    All files are in esri ascii format
3354
3355    4 types of information
3356    bathymetry
3357    elevation
3358    u velocity
3359    v velocity
3360
3361    Assumptions
3362    The metadata of all the files is the same
    Each type is in a separate directory
    One bath file with extension .000
3365    The time period is less than 24hrs and uniform.
3366    """
3367    from Scientific.IO.NetCDF import NetCDFFile
3368
3369    from coordinate_transforms.redfearn import redfearn
3370
3371    precision = Float # So if we want to change the precision its done here
3372
3373    # go in to the bath dir and load the only file,
3374    bath_files = os.listdir(bath_dir)
3375    #print "bath_files",bath_files
3376
3377    #fixme: make more general?
3378    bath_file = bath_files[0]
3379    bath_dir_file =  bath_dir + os.sep + bath_file
3380    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3381    #print "bath_metadata",bath_metadata
3382    #print "bath_grid",bath_grid
3383
3384    #Use the date.time of the bath file as a basis for
3385    #the start time for other files
3386    base_start = bath_file[-12:]
3387
3388    #go into the elevation dir and load the 000 file
3389    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3390                         + base_start
3391    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
3392    #print "elevation_dir_file",elevation_dir_file
3393    #print "elevation_grid", elevation_grid
3394
3395    elevation_files = os.listdir(elevation_dir)
3396    ucur_files = os.listdir(ucur_dir)
3397    vcur_files = os.listdir(vcur_dir)
3398
3399    # the first elevation file should be the
3400    # file with the same base name as the bath data
3401    #print "elevation_files[0]",elevation_files[0]
3402    #print "'el' + base_start",'el' + base_start
3403    assert elevation_files[0] == 'el' + base_start
3404
3405    #assert bath_metadata == elevation_metadata
3406
3407
3408
3409    number_of_latitudes = bath_grid.shape[0]
3410    number_of_longitudes = bath_grid.shape[1]
3411    #number_of_times = len(os.listdir(elevation_dir))
3412    #number_of_points = number_of_latitudes*number_of_longitudes
3413    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3414
3415    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3416                  for x in range(number_of_longitudes)]
3417    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3418                 for y in range(number_of_latitudes)]
3419
3420     # reverse order of lat, so the fist lat represents the first grid row
3421    latitudes.reverse()
3422
3423    #print "latitudes - before _get_min_max_indexes",latitudes
3424    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
3425                                                 minlat=minlat, maxlat=maxlat,
3426                                                 minlon=minlon, maxlon=maxlon)
3427
3428
3429    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3430    #print "bath_grid",bath_grid
3431    latitudes = latitudes[kmin:kmax]
3432    longitudes = longitudes[lmin:lmax]
3433    number_of_latitudes = len(latitudes)
3434    number_of_longitudes = len(longitudes)
3435    number_of_times = len(os.listdir(elevation_dir))
3436    number_of_points = number_of_latitudes*number_of_longitudes
3437    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3438    #print "number_of_points",number_of_points
3439
3440    #Work out the times
3441    if len(elevation_files) > 1:
3442        # Assume: The time period is less than 24hrs.
3443        time_period = (int(elevation_files[1][-3:]) - \
3444                      int(elevation_files[0][-3:]))*60*60
3445        times = [x*time_period for x in range(len(elevation_files))]
3446    else:
3447        times = [0.0]
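    # Example: with files 'el.000' and 'el.003' the extensions differ
    # by 3 (hours), so time_period = 3*60*60 s and
    # times = [0, 10800, 21600, ...]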

    if verbose:
        print '------------------------------------------------'
        print 'Statistics:'
        print '  Extent (lat/lon):'
        print '    lat in [%f, %f], len(lat) == %d'\
              %(min(latitudes), max(latitudes),
                len(latitudes))
        print '    lon in [%f, %f], len(lon) == %d'\
              %(min(longitudes), max(longitudes),
                len(longitudes))
        print '    t in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))

    ######### WRITE THE SWW FILE #############
    # NetCDF file definition
    outfile = NetCDFFile(sww_file, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'

    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    #Start time in seconds since the epoch (midnight 1/1/1970)
    outfile.starttime = starttime = times[0]

    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)

    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    #Store
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print 'Making triangular grid'
    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat, lon)

            msg = 'Zone boundary crossed at longitude = %f' %lon
            #assert zone == refzone, msg
            x[i] = easting
            y[i] = northing
            i += 1

    #Construct 2 triangles per 'rectangular' element
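    # Vertex layout of one grid cell (l indexes longitude/x, k indexes
    # latitude/y; k increases down the grid after the latitude reversal):
    #
    #     v2 = (l,k)      v4 = (l+1,k)
    #        +----------------+
    #        | \              |
    #        |   \            |
    #        |     \          |
    #        |       \        |
    #        +----------------+
    #     v1 = (l,k+1)   v3 = (l+1,k+1)
    #
    # Each cell is split along the v2-v3 diagonal into the triangles
    # [v1,v3,v2] and [v4,v2,v3].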
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            #Note, this is different to the ferret2sww code
            #since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) #Upper element
            volumes.append([v4,v2,v3]) #Lower element

    volumes = array(volumes)

    geo_ref = Geo_reference(refzone, min(x), min(y))
    geo_ref.write_NetCDF(outfile)

    # This would put the geo ref in the middle:
    #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2.0, (max(y)+min(y))/2.0)

    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (x/y):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y),
                len(y))
        print 'geo_ref: ', geo_ref

    z = resize(bath_grid, outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

    # To create a minimal but valid sww file for debugging:
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z
    # and put the next line up in the code after outfile.order = 1
    #number_of_times = 1

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
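    # The elevation grids hold water surface elevation and the ucur/vcur
    # grids hold current velocities; stage is the scaled surface plus
    # mean_stage, and momenta are the velocities times the water depth h.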
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
           _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])

        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]

        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue(missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                raise DataMissingValuesError, msg
            else:
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler

        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()

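# Example usage (a minimal sketch; the directory and file names are
# hypothetical):
#
#     asc_csiro2sww('bath', 'elev', 'ucur', 'vcur', 'bound.sww',
#                   minlat=-38.0, maxlat=-35.0,
#                   minlon=148.0, maxlon=151.0,
#                   mean_stage=0.0, verbose=True)
#
# Each directory holds one esri ascii grid per timestep; the result is
# an sww file suitable for use as a boundary condition.
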
def _get_min_max_indexes(latitudes, longitudes,
                         minlat=None, maxlat=None,
                         minlon=None, maxlon=None):
    """
    Return min and max indexes (for slicing) of the lat and long arrays,
    just covering the area specified with min/max lat/long.

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assume latitudes & longitudes are sorted:
    long - from low to high (west to east, eg 148 - 151)
    lat - from high to low (north to south, eg -35 - -38)
    """

    # reverse order of lat, so it's in ascending order
    latitudes.reverse()
    largest_lat_index = len(latitudes)-1

    #Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0

    if maxlat is None:
        lat_max_index = largest_lat_index #len(latitudes)
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)

    #Take into account that the latitude list was reversed
    latitudes.reverse() # list was reversed in place; restore original order
    lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
                                   largest_lat_index - lat_min_index
    lat_max_index = lat_max_index + 1 # taking into account how slicing works
    lon_max_index = lon_max_index + 1 # taking into account how slicing works

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index

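# Example (illustrative values): for latitudes [-35, -36, -37, -38]
# (north to south) and longitudes [148, 149, 150, 151],
#
#     kmin, kmax, lmin, lmax = _get_min_max_indexes(lats, lons,
#                                                   minlat=-37.5,
#                                                   maxlat=-35.5,
#                                                   minlon=148.5,
#                                                   maxlon=150.5)
#
# gives slice bounds such that lats[kmin:kmax] and lons[lmin:lmax] just
# cover the requested area.
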

def _read_asc(filename, verbose=False):
    """Read esri file from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    """

    datafile = open(filename)

    if verbose: print 'Reading DEM from %s' %(filename)
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), 'lines'

    ncols = int(lines.pop(0).split()[1].strip())
    nrows = int(lines.pop(0).split()[1].strip())
    xllcorner = float(lines.pop(0).split()[1].strip())
    yllcorner = float(lines.pop(0).split()[1].strip())
    cellsize = float(lines.pop(0).split()[1].strip())
    NODATA_value = float(lines.pop(0).split()[1].strip())

    assert len(lines) == nrows

    #Store data
    grid = []

    n = len(lines)
    for i, line in enumerate(lines):
        cells = line.split()
        assert len(cells) == ncols
        grid.append(array([float(x) for x in cells]))
    grid = array(grid)

    return {'xllcorner':xllcorner,
            'yllcorner':yllcorner,
            'cellsize':cellsize,
            'NODATA_value':NODATA_value}, grid

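# Example usage (a minimal sketch; 'dem.asc' is a hypothetical file):
#
#     metadata, grid = _read_asc('dem.asc', verbose=True)
#     cellsize = metadata['cellsize']
#     print grid.shape   # (nrows, ncols)
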

# FIXME (Ole): Is this doing anything at all?
def sww2timeseries(swwfile,
                   gauge_filename,
                   gauge_data_outname,
                   quantity = None,
                   time_min = None,
                   time_max = None,
                   verbose = False):

    """Read SWW file and extract time series for prescribed quantities at
    gauge locations.

    The gauge locations are defined in gauge_filename. This file should be
    in the form: gaugename, easting, northing, and should be stored in a
    .csv or .xya file.

    Time series data at the gauges can be written to file (for example,
    Benfield requested raw data) with the default being no data written.

    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'depth'.

    The user can define a list of quantities. The possibilities are
    the conserved quantities of the shallow water wave equation and other
    quantities which can be derived from those, i.e.
    ['depth', 'xmomentum', 'ymomentum', 'momentum', 'velocity', 'bearing'].

    Momentum is the absolute momentum, sqrt(xmomentum^2 + ymomentum^2).
    Note, units of momentum are m^2/s and depth is m.

    Velocity is absolute momentum divided by depth. (Confirming correct units:
    vel = abs mom / depth = (m^2/s)/m = m/s.)

    Bearing returns the angle of the velocity vector from North.

    If time_min and time_max are given, output is plotted in that time range.
    The default is to plot the entire time range evident in the sww file.

    The exported graphic format is 'png' and will be stored in the same
    directory as the input file.
    """

    if quantity is None: quantity = 'depth'

    # extract gauge locations from gauge file

    # extract all quantities from swwfile (f = file_function)
    # FIXME: f is never defined here, so this implementation is incomplete
    if time_min is None: time_min = min(f.get_time())
    if time_max is None: time_max = max(f.get_time())

    # loop through appropriate range of time
    # plot prescribed quantities and export data if requested

    #if gauge_data_outname is None:

    return
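
# A minimal sketch of the missing extraction step, assuming a
# file_function like the one in pyvolution's util module (the call
# signature below is an assumption, not confirmed for this revision):
#
#     from util import file_function
#     f = file_function(swwfile,
#                       quantities=['stage', 'elevation'],
#                       interpolation_points=gauge_locations)
#     for t in f.get_time():
#         values = f(t, point_id=0)  # quantities at the first gauge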