source: inundation/pyvolution/data_manager.py @ 1835

Last change on this file since 1835 was 1835, checked in by ole, 19 years ago

Implemented tms file format (like sww without x, y)
Fixed broken test in sww2ers

File size: 84.1 KB
Line 
1"""datamanager.py - input output for AnuGA
2
3
This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18
19.dem: NetCDF representation of regular DEM data
20
21.tsh: ASCII format for storing meshes and associated boundary and region info
22.msh: NetCDF format for storing meshes and associated boundary and region info
23
24.nc: Native ferret NetCDF format
.geo: Houdini's ASCII geometry format (?)
26
27
28A typical dataflow can be described as follows
29
30Manually created files:
31ASC, PRJ:     Digital elevation models (gridded)
32TSH:          Triangular meshes (e.g. created from pmesh)
33NC            Model outputs for use as boundary conditions (e.g from MOST)
34
35
36AUTOMATICALLY CREATED FILES:
37
38ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
39
NC -> SWW: Conversion of MOST boundary files to boundary sww
41
42PTS + TSH -> TSH with elevation: Least squares fit
43
44TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
45
TSH + Boundary SWW -> SWW: Simulation using pyvolution
47
48"""
49
50from Numeric import concatenate
51
52from coordinate_transforms.geo_reference import Geo_reference
53
def make_filename(s):
    """Return a filesystem-friendly version of string s.

    Surrounding whitespace is stripped, internal spaces become
    underscores, parentheses are dropped and any resulting double
    underscores are collapsed to one.
    """

    cleaned = s.strip().replace(' ', '_')
    for ch in ('(', ')'):
        cleaned = cleaned.replace(ch, '')
    return cleaned.replace('__', '_')
65
66
def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       checkdir(path, verbose):

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    # Windows-style platforms have no chmod and a different fallback dir
    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1

    # Ensure trailing separator for directories.  Guard against an empty
    # path: the original indexed path[-1] unconditionally, which raised
    # IndexError on '' before the intended path == '' test below was reached.
    if path and path[-1] != os.sep:
        path = path + os.sep

    path = os.path.expanduser(path)  # Expand ~ or ~user in pathname

    # BUG FIX: the original tested os.access(path, os.R_OK and os.W_OK);
    # 'and' on two truthy ints yields just os.W_OK, so readability was
    # never checked.  Access mode flags must be combined with bitwise OR.
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            os.mkdir(path)

            # Change access rights if possible
            if unix:
                os.system('chmod 775 ' + path)
            # FIXME: What about access rights under Windows?

            if verbose: print('MESSAGE: Directory %s created.' % path)

        except OSError:
            # Fall back to a writable scratch directory
            print('WARNING: Directory %s could not be created.' % path)
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print('Using directory %s instead' % path)

    return path
119
120
121
def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if not os.path.isdir(path):
        return

    for entry in os.listdir(path):
        full = os.path.join(path, entry)

        # Recurse into real subdirectories; everything else (files,
        # symlinks) is removed directly on a best-effort basis
        if os.path.isdir(full) and not os.path.islink(full):
            del_dir(full)
        else:
            try:
                os.remove(full)
            except:
                print("Could not remove file %s" % full)

    os.rmdir(path)
143
144
def create_filename(datadir, filename, format, size=None, time=None):
    """Build a data file name of the form
    <datadir>/<filename>[_size<size>][_time<time>].<format>

    datadir is verified (and created if needed) via check_dir, whose
    return value already carries the trailing separator.
    """

    import os

    name = check_dir(datadir) + filename

    if size is not None:
        name = name + '_size%d' % size

    if time is not None:
        name = name + '_time%.2f' % time

    return name + '.' + format
160
161
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os

    dir = check_dir(datadir)  # already ends with os.sep

    # BUG FIX: create_filename produces names like '<name>_size<N>....<fmt>'
    # but the original pattern used '_size=%d' (with '=') and inserted an
    # extra os.sep after the already-terminated directory, so files written
    # by create_filename were never found.
    pattern = dir + filename + '_size%d*.%s' % (size, format)
    return glob.glob(pattern)
175
176
177
#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        # Only read, write and append modes are meaningful
        msg = ('''Mode %s must be either:''' % mode
               + '''   'w' (write)'''
               + '''   'r' (read)'''
               + '''   'a' (append)''')
        assert mode in ('r', 'w', 'a'), msg

        # Derive the data file name from the domain's data directory,
        # name and the format-specific extension
        self.filename = create_filename(domain.get_datadir(),
                        domain.get_name(), extension)

        self.timestep = 0                    # current timestep index
        self.number_of_volumes = len(domain) # number of triangles
        self.domain = domain


        #FIXME: Should we have a general set_precision function?
205
206
207
#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion=False):
        """Create (mode 'w') or open an sww file for the given domain.

        max_size:  soft limit on file size in bytes; when exceeded,
                   store_timestep transparently starts a continuation file.
        recursion: internal flag/counter used when store_timestep chains to
                   a continuation file -- callers normally leave it False.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        # A max_size attribute on the domain overrides the argument
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        # Sets self.filename, self.number_of_volumes and self.domain
        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            # With smoothing, vertices shared between triangles are stored
            # once; otherwise each triangle contributes three points
            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            # One entry per stored timestep for time and each quantity
            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility -- duplicate elevation into 'z'
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file

        names: quantity name or list of names (e.g. 'stage', 'xmomentum',
        'ymomentum').  If appending the step would push the file past
        max_size, a continuation file is started transparently by
        temporarily renaming the domain and recursing into a new
        Data_format_sww instance.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        # Depth below which cells are treated as dry when storing stage
        minimum_allowed_depth = 0.001
        #minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF; retry in case another process has the file open
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase =  file_size/i   # estimated bytes per timestep
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domian.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break swollens reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            # Accept a single name as well as a list/tuple of names
            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    # Clip stage to the bed wherever depth is below the
                    # minimum allowed depth (dry cells show the bed)
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()
478
479
480
#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        """Create (mode 'w') or open a checkpoint file for the given domain.

        Allocates x, y, volumes and time variables plus one
        timestep-indexed variable per quantity on the domain.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float  # (original imported Float twice)

        self.precision = Float #Use full precision

        #FIXME: extension is 'sww' although this class documents .cpt
        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write the static mesh data (x, y coordinates and connectivity)
        to the checkpoint file.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        # BUG FIX: the original also assigned z[:] = Z..., but no local 'z'
        # was ever defined (and no per-point elevation variable exists in
        # this file format), so store_checkpoint always raised NameError.
        # Elevation is stored per timestep via store_timestep instead.

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and the named quantity's vertex values to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF; retry for a while in case a reader has the file open
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                          %self.filename
                msg += ' - trying again'
                print(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        # BUG FIX: this failure check was mis-indented inside the retry
        # loop, so 'raise msg' executed unconditionally after the first
        # iteration even when the open succeeded.  It now matches the
        # equivalent code in Data_format_sww.store_timestep.
        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise msg  #FIXME: string exceptions are deprecated


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        # BUG FIX: the original stored into the hard-wired 'stage'
        # variable regardless of which quantity was requested
        target = fid.variables[name]
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity values at vertices
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision = self.precision)

        target[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()
626
627
628
629
630
#Function for storing xya output
#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Generic interface to data formats
    """


    def __init__(self, domain, mode = 'w'):
        # Only precision and filename setup here; the file itself is
        # written by store_all
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        # Number of distinct points depends on whether vertices are shared
        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        # Header: <point count> <attribute count> followed by a column legend
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        # NOTE: map(None, ...) is the Python 2 idiom for zip-with-padding
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file
        """
        # Not implemented for the xya format (see FIXME on the class)
        pass
702
703
#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number elements.
       triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """

    import os.path

    # Append the .obj extension unless already present
    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'

    # Validate the input shape BEFORE creating the file
    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    # Text mode (the payload is plain ASCII) and try/finally so the
    # handle is closed even if a write fails -- the original opened in
    # 'wb' and leaked the handle on error.
    outfile = open(FN, 'w')
    try:
        outfile.write("# Triangulation as an obj file\n")

        # One 'v' record per vertex, element by element
        for i in range(M):
            for j in range(N):
                outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

        # One 'f' record per element referencing its three vertices
        # (obj indices are 1-based)
        for i in range(M):
            base = i*N
            outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))
    finally:
        outfile.close()
742
743
744#########################################################
745#Conversion routines
746########################################################
747
748def sww2obj(basefilename, size):
749    """Convert netcdf based data output to obj
750    """
751    from Scientific.IO.NetCDF import NetCDFFile
752
753    from Numeric import Float, zeros
754
755    #Get NetCDF
756    FN = create_filename('.', basefilename, 'sww', size)
757    print 'Reading from ', FN
758    fid = NetCDFFile(FN, 'r')  #Open existing file for read
759
760
761    # Get the variables
762    x = fid.variables['x']
763    y = fid.variables['y']
764    z = fid.variables['elevation']
765    time = fid.variables['time']
766    stage = fid.variables['stage']
767
768    M = size  #Number of lines
769    xx = zeros((M,3), Float)
770    yy = zeros((M,3), Float)
771    zz = zeros((M,3), Float)
772
773    for i in range(M):
774        for j in range(3):
775            xx[i,j] = x[i+j*M]
776            yy[i,j] = y[i+j*M]
777            zz[i,j] = z[i+j*M]
778
779    #Write obj for bathymetry
780    FN = create_filename('.', basefilename, 'obj', size)
781    write_obj(FN,xx,yy,zz)
782
783
784    #Now read all the data with variable information, combine with
785    #x,y info and store as obj
786
787    for k in range(len(time)):
788        t = time[k]
789        print 'Processing timestep %f' %t
790
791        for i in range(M):
792            for j in range(3):
793                zz[i,j] = stage[k,i+j*M]
794
795
796        #Write obj for variable data
797        #FN = create_filename(basefilename, 'obj', size, time=t)
798        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
799        write_obj(FN,xx,yy,zz)
800
801
def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    # Each geometry line holds three (x, y, z) triples, one per vertex
    for i, line in enumerate(lines):
        tokens = line.split()
        values = [float(w) for w in tokens]

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print('Processing %s' % filename)

        # BUG FIX: glob already returns paths prefixed with data_dir;
        # the original prepended data_dir+os.sep again, so the files it
        # had just globbed could never be opened.
        lines = open(filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            # Was: raise 'Hmmmm' -- string exceptions are invalid in
            # modern Python and uninformative
            raise Exception('Could not extract time from filename %s'
                            % filename)


        # Overwrite z with this timestep's per-vertex values
        for i, line in enumerate(lines):
            tokens = line.split()
            values = [float(w) for w in tokens]

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
879
880
def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Source (read) and destination (write) files
    infile = NetCDFFile(filename1, 'r')
    outfile = NetCDFFile(filename2, 'w')


    #Mirror every dimension and variable definition
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy all static variables verbatim; time and stage are filtered below
    for name in infile.variables:
        if name not in ('time', 'stage'):
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    for i, j in enumerate(range(first, last, step)):
        print('Copying timestep %d of %d (%f)' % (j, last-first, time[j]))
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
928
929
#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename

    domain.format selects the writer class, e.g. 'sww' ->
    Data_format_sww.  An unknown format raises KeyError.
    """

    # Look the class up by name instead of eval -- same dispatch,
    # without evaluating an arbitrary string as code.
    cls = globals()['Data_format_%s' % domain.format]
    return cls(domain, mode)
937
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Open existing xya file for read
    infile = open(root + ext, 'r')

    if verbose: print('Reading xya points from %s' % (root + ext))

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        # Warn about malformed lines but still attempt to parse them.
        # (The original used try/assert/except; asserts are stripped
        # under python -O, silently disabling the warning.)
        if len(fields) != 3:
            print('WARNING: Line %d doesn\'t have 3 elements: %s' % (i, line))

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except (ValueError, IndexError):
            # Skip header lines and anything else non-numeric
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)

    infile.close()  # BUG FIX: the original never closed the input file

    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print('Store to NetCDF file %s' % ptsname)
    write_ptsfile(ptsname, points, attribute, attribute_name)
1013
1014
def write_ptsfile(ptsname, points, attribute, attribute_name = None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    from Numeric import Float
    from Scientific.IO.NetCDF import NetCDFFile

    # Default label for the attribute variable
    if attribute_name is None:
        attribute_name = 'attribute'

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'

    #Georeferencing (default reference point)
    from coordinate_transforms.geo_reference import Geo_reference
    Geo_reference().write_NetCDF(outfile)

    #Dimensions: N points in 2D
    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    #Store data directly through the variable handles
    outfile.variables['points'][:, :] = points
    outfile.variables[attribute_name][:] = attribute

    outfile.close()
1059   
1060
def dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    basename_in: stem of input file (reads basename_in + '.dem')
    basename_out: stem of output file (writes + '.pts');
                  defaults to basename_in
    verbose: if True, print progress information
    easting_min/easting_max/northing_min/northing_max: optional clipping
        rectangle in absolute UTM coordinates; each defaults to the
        corresponding edge of the full DEM extent. The output
        georeference is moved to (easting_min, northing_min) and point
        coordinates are stored relative to that origin.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    #Header attributes. Scalars are stored as one-element arrays in the
    #.dem file, hence the [0] indexing.
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing: the clipped region's lower left corner becomes the
    #new origin; point coordinates below are stored relative to it.
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows


    # dimension definitions
    #NOTE(review): this assumes the bounding box aligns with whole grid
    #cells; a box that intersects rows/columns only partially will make
    #the final assert below fail.
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
    outfile.createDimension('number_of_points', nrows_in_bounding_box*ncols_in_bounding_box)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    #View the flat elevation data as an (nrows, ncols) grid
    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    #FIXME: Could perhaps be faster using array operations (Fixed 27/7/05)
    global_index = 0
    for i in range(nrows):
        #Progress report roughly every 10% of rows
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)       
       
        lower_index = global_index
        #Per-row staging buffers, written to the NetCDF file in one go
        tpoints = zeros((ncols_in_bounding_box, 2), Float)
        telev =  zeros(ncols_in_bounding_box, Float)
        local_index = 0

        #Row 0 is the northernmost row of the grid.
        #NOTE(review): row 0 maps to y = yllcorner + nrows*cellsize which
        #looks one cell too high ((nrows-1-i) would align row 0 with the
        #top-most cell) -- confirm against the DEM convention.
        y = (nrows-i)*cellsize + yllcorner
        for j in range(ncols):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                #Store relative to the new origin (easting_min, northing_min)
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        #Only write the row if all ncols_in_bounding_box points fell
        #inside the clipping rectangle; partially clipped rows are
        #silently dropped even though global_index has been advanced.
        if upper_index == lower_index + ncols_in_bounding_box:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nrows_in_bounding_box*ncols_in_bounding_box, 'index not equal to number of points'

    infile.close()
    outfile.close()
1204
1205
1206def sww2asc(basename_in, basename_out = None,
1207            quantity = None,
1208            timestep = None,
1209            reduction = None,
1210            cellsize = 10,
1211            verbose = False,
1212            origin = None):
1213    """Read SWW file and convert to Digitial Elevation model format (.asc)
1214
1215    Example:
1216
1217    ncols         3121
1218    nrows         1800
1219    xllcorner     722000
1220    yllcorner     5893000
1221    cellsize      25
1222    NODATA_value  -9999
1223    138.3698 137.4194 136.5062 135.5558 ..........
1224
1225    Also write accompanying file with same basename_in but extension .prj
1226    used to fix the UTM zone, datum, false northings and eastings.
1227
1228    The prj format is assumed to be as
1229
1230    Projection    UTM
1231    Zone          56
1232    Datum         WGS84
1233    Zunits        NO
1234    Units         METERS
1235    Spheroid      WGS84
1236    Xshift        0.0000000000
1237    Yshift        10000000.0000000000
1238    Parameters
1239
1240
1241    if quantity is given, out values from quantity otherwise default to
1242    elevation
1243
1244    if timestep (an index) is given, output quantity at that timestep
1245
1246    if reduction is given use that to reduce quantity over all timesteps.
1247
1248    """
1249    from Numeric import array, Float, concatenate, NewAxis, zeros,\
1250         sometrue
1251
1252
1253    #FIXME: Should be variable
1254    datum = 'WGS84'
1255    false_easting = 500000
1256    false_northing = 10000000
1257    NODATA_value = -9999
1258
1259    if quantity is None:
1260        quantity = 'elevation'
1261
1262    if reduction is None:
1263        reduction = max
1264
1265    if basename_out is None:
1266        basename_out = basename_in + '_%s' %quantity
1267
1268    swwfile = basename_in + '.sww'
1269    ascfile = basename_out + '.asc'
1270    prjfile = basename_out + '.prj'
1271
1272
1273    if verbose: print 'Reading from %s' %swwfile
1274    #Read sww file
1275    from Scientific.IO.NetCDF import NetCDFFile
1276    fid = NetCDFFile(swwfile)
1277
1278    #Get extent and reference
1279    x = fid.variables['x'][:]
1280    y = fid.variables['y'][:]
1281    volumes = fid.variables['volumes'][:]
1282
1283    ymin = min(y); ymax = max(y)
1284    xmin = min(x); xmax = max(x)
1285
1286    number_of_timesteps = fid.dimensions['number_of_timesteps']
1287    number_of_points = fid.dimensions['number_of_points']
1288    if origin is None:
1289
1290        #Get geo_reference
1291        #sww files don't have to have a geo_ref
1292        try:
1293            geo_reference = Geo_reference(NetCDFObject=fid)
1294        except AttributeError, e:
1295            geo_reference = Geo_reference() #Default georef object
1296
1297        xllcorner = geo_reference.get_xllcorner()
1298        yllcorner = geo_reference.get_yllcorner()
1299        zone = geo_reference.get_zone()
1300    else:
1301        zone = origin[0]
1302        xllcorner = origin[1]
1303        yllcorner = origin[2]
1304
1305
1306    #Get quantity and reduce if applicable
1307    if verbose: print 'Reading quantity %s' %quantity
1308
1309    if quantity.lower() == 'depth':
1310        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
1311    else:
1312        q = fid.variables[quantity][:]
1313
1314
1315    if len(q.shape) == 2:
1316        if verbose: print 'Reducing quantity %s' %quantity
1317        q_reduced = zeros( number_of_points, Float )
1318
1319        for k in range(number_of_points):
1320            q_reduced[k] = reduction( q[:,k] )
1321
1322        q = q_reduced
1323
1324    #Now q has dimension: number_of_points
1325
1326
1327    #Write prj file
1328    if verbose: print 'Writing %s' %prjfile
1329    prjid = open(prjfile, 'w')
1330    prjid.write('Projection    %s\n' %'UTM')
1331    prjid.write('Zone          %d\n' %zone)
1332    prjid.write('Datum         %s\n' %datum)
1333    prjid.write('Zunits        NO\n')
1334    prjid.write('Units         METERS\n')
1335    prjid.write('Spheroid      %s\n' %datum)
1336    prjid.write('Xshift        %d\n' %false_easting)
1337    prjid.write('Yshift        %d\n' %false_northing)
1338    prjid.write('Parameters\n')
1339    prjid.close()
1340
1341    #Create grid and update xll/yll corner and x,y
1342    if verbose: print 'Creating grid'
1343    ncols = int((xmax-xmin)/cellsize)+1
1344    nrows = int((ymax-ymin)/cellsize)+1
1345
1346    newxllcorner = xmin+xllcorner
1347    newyllcorner = ymin+yllcorner
1348
1349    x = x+xllcorner-newxllcorner
1350    y = y+yllcorner-newyllcorner
1351
1352    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1353    assert len(vertex_points.shape) == 2
1354
1355
1356    from Numeric import zeros, Float
1357    grid_points = zeros ( (ncols*nrows, 2), Float )
1358
1359
1360    for i in xrange(nrows):
1361        yg = i*cellsize
1362        for j in xrange(ncols):
1363            xg = j*cellsize
1364            k = i*ncols + j
1365
1366            #print k, xg, yg
1367            grid_points[k,0] = xg
1368            grid_points[k,1] = yg
1369
1370
1371    #Interpolate
1372
1373    from least_squares import Interpolation
1374    from util import inside_polygon
1375
1376    #FIXME: This should be done with precrop = True, otherwise it'll
1377    #take forever. With expand_search  set to False, some grid points might
1378    #miss out....
1379
1380    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
1381                           precrop = False, expand_search = False,
1382                           verbose = verbose)
1383
1384    #Interpolate using quantity values
1385    if verbose: print 'Interpolating'
1386    grid_values = interp.interpolate(q).flat
1387
1388    #Write
1389    if verbose: print 'Writing %s' %ascfile
1390    ascid = open(ascfile, 'w')
1391
1392    ascid.write('ncols         %d\n' %ncols)
1393    ascid.write('nrows         %d\n' %nrows)
1394    ascid.write('xllcorner     %d\n' %newxllcorner)
1395    ascid.write('yllcorner     %d\n' %newyllcorner)
1396    ascid.write('cellsize      %f\n' %cellsize)
1397    ascid.write('NODATA_value  %d\n' %NODATA_value)
1398
1399
1400    #Get bounding polygon from mesh
1401    P = interp.mesh.get_boundary_polygon()
1402    inside_indices = inside_polygon(grid_points, P)
1403
1404    for i in range(nrows):
1405        if verbose and i%((nrows+10)/10)==0:
1406            print 'Doing row %d of %d' %(i, nrows)
1407
1408        for j in range(ncols):
1409            index = (nrows-i-1)*ncols+j
1410
1411            if sometrue(inside_indices == index):
1412                ascid.write('%f ' %grid_values[index])
1413            else:
1414                ascid.write('%d ' %NODATA_value)
1415
1416        ascid.write('\n')
1417
1418    #Close
1419    ascid.close()
1420    fid.close()
1421
1422
1423
1424
def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
                                  verbose=False):
    """Read Digitial Elevation model from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert basename_in + '.asc' to NetCDF format (.dem)
    mimicking the ASCII format closely.


    An accompanying file with same basename_in but extension .prj must exist
    and is used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters

    basename_out: stem of the output file (writes + '.dem');
                  defaults to basename_in.
    verbose: if True, print progress information.

    The .prj file is parsed positionally: AssertionErrors are raised if
    the keywords do not appear in exactly the order shown above.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, array

    #root, ext = os.path.splitext(basename_in)
    root = basename_in

    ###########################################
    # Read Meta data
    if verbose: print 'Reading METADATA from %s' %root + '.prj'
    metadatafile = open(root + '.prj')
    metalines = metadatafile.readlines()
    metadatafile.close()

    #Each line is '<keyword> <value>'; keyword order is fixed
    L = metalines[0].strip().split()
    assert L[0].strip().lower() == 'projection'
    projection = L[1].strip()                   #TEXT

    L = metalines[1].strip().split()
    assert L[0].strip().lower() == 'zone'
    zone = int(L[1].strip())

    L = metalines[2].strip().split()
    assert L[0].strip().lower() == 'datum'
    datum = L[1].strip()                        #TEXT

    L = metalines[3].strip().split()
    assert L[0].strip().lower() == 'zunits'     #IGNORE
    zunits = L[1].strip()                       #TEXT

    L = metalines[4].strip().split()
    assert L[0].strip().lower() == 'units'
    units = L[1].strip()                        #TEXT

    L = metalines[5].strip().split()
    assert L[0].strip().lower() == 'spheroid'   #IGNORE
    spheroid = L[1].strip()                     #TEXT

    L = metalines[6].strip().split()
    assert L[0].strip().lower() == 'xshift'
    false_easting = float(L[1].strip())

    L = metalines[7].strip().split()
    assert L[0].strip().lower() == 'yshift'
    false_northing = float(L[1].strip())

    #print false_easting, false_northing, zone, datum


    ###########################################
    #Read DEM data

    datafile = open(basename_in + '.asc')

    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), ' lines'

    #Six header lines, each '<keyword> <value>'
    #NOTE(review): int() on NODATA_value will raise if the file writes it
    #as a float such as '-9999.0' -- confirm input files always use ints.
    ncols = int(lines[0].split()[1].strip())
    nrows = int(lines[1].split()[1].strip())
    xllcorner = float(lines[2].split()[1].strip())
    yllcorner = float(lines[3].split()[1].strip())
    cellsize = float(lines[4].split()[1].strip())
    NODATA_value = int(lines[5].split()[1].strip())

    #Header must be followed by exactly one line per grid row
    assert len(lines) == nrows + 6


    ##########################################


    if basename_out == None:
        netcdfname = root + '.dem'
    else:
        netcdfname = basename_out + '.dem'

    if verbose: print 'Store to NetCDF file %s' %netcdfname
    # NetCDF file definition
    fid = NetCDFFile(netcdfname, 'w')

    #Create new file
    fid.institution = 'Geoscience Australia'
    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
                      'of spatial point data'

    #Mirror the ASCII header as global attributes
    fid.ncols = ncols
    fid.nrows = nrows
    fid.xllcorner = xllcorner
    fid.yllcorner = yllcorner
    fid.cellsize = cellsize
    fid.NODATA_value = NODATA_value

    fid.zone = zone
    fid.false_easting = false_easting
    fid.false_northing = false_northing
    fid.projection = projection
    fid.datum = datum
    fid.units = units


    # dimension definitions
    fid.createDimension('number_of_rows', nrows)
    fid.createDimension('number_of_columns', ncols)

    # variable definitions
    fid.createVariable('elevation', Float, ('number_of_rows',
                                            'number_of_columns'))

    # Get handles to the variables
    elevation = fid.variables['elevation']

    #Store data, one ASCII line per grid row (rows follow the .asc
    #ordering, i.e. first line is the northernmost row)
    n = len(lines[6:])
    for i, line in enumerate(lines[6:]):
        fields = line.split()
        #Progress report roughly every 10% of rows
        if verbose and i%((n+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)           

        elevation[i, :] = array([float(x) for x in fields])

    fid.close()
1582
1583
1584
1585
1586
1587def ferret2sww(basename_in, basename_out = None,
1588               verbose = False,
1589               minlat = None, maxlat = None,
1590               minlon = None, maxlon = None,
1591               mint = None, maxt = None, mean_stage = 0,
1592               origin = None, zscale = 1,
1593               fail_on_NaN = True,
1594               NaN_filler = 0,
1595               elevation = None,
1596               inverted_bathymetry = False
1597               ): #FIXME: Bathymetry should be obtained
1598                                  #from MOST somehow.
1599                                  #Alternatively from elsewhere
1600                                  #or, as a last resort,
1601                                  #specified here.
1602                                  #The value of -100 will work
1603                                  #for the Wollongong tsunami
1604                                  #scenario but is very hacky
1605    """Convert 'Ferret' NetCDF format for wave propagation to
1606    sww format native to pyvolution.
1607
1608    Specify only basename_in and read files of the form
1609    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
1610    relative height, x-velocity and y-velocity, respectively.
1611
1612    Also convert latitude and longitude to UTM. All coordinates are
1613    assumed to be given in the GDA94 datum.
1614
1615    min's and max's: If omitted - full extend is used.
1616    To include a value min may equal it, while max must exceed it.
1617    Lat and lon are assuemd to be in decimal degrees
1618
1619    origin is a 3-tuple with geo referenced
1620    UTM coordinates (zone, easting, northing)
1621
1622    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
1623    which means that longitude is the fastest
1624    varying dimension (row major order, so to speak)
1625
1626    ferret2sww uses grid points as vertices in a triangular grid
1627    counting vertices from lower left corner upwards, then right
1628    """
1629
1630    import os
1631    from Scientific.IO.NetCDF import NetCDFFile
1632    from Numeric import Float, Int, Int32, searchsorted, zeros, array
1633    precision = Float
1634
1635
1636    #Get NetCDF data
1637    if verbose: print 'Reading files %s_*.nc' %basename_in
1638    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
1639    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
1640    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
1641    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
1642
1643    if basename_out is None:
1644        swwname = basename_in + '.sww'
1645    else:
1646        swwname = basename_out + '.sww'
1647
1648    times = file_h.variables['TIME']
1649    latitudes = file_h.variables['LAT']
1650    longitudes = file_h.variables['LON']
1651
1652
1653    e_lat = file_e.variables['LAT'][:]
1654
1655
1656    #Check that files are compatible
1657    from Numeric import allclose
1658    assert allclose(latitudes, file_u.variables['LAT'])
1659    assert allclose(latitudes, file_v.variables['LAT'])   
1660    assert allclose(latitudes, file_e.variables['LAT'])
1661
1662    assert allclose(longitudes, file_u.variables['LON'])
1663    assert allclose(longitudes, file_v.variables['LON'])
1664    assert allclose(longitudes, file_e.variables['LON'])   
1665
1666
1667
1668    if mint == None:
1669        jmin = 0
1670    else:
1671        jmin = searchsorted(times, mint)
1672
1673    if maxt == None:
1674        jmax=len(times)
1675    else:
1676        jmax = searchsorted(times, maxt)
1677
1678    if minlat == None:
1679        kmin=0
1680    else:
1681        kmin = searchsorted(latitudes, minlat)
1682
1683    if maxlat == None:
1684        kmax = len(latitudes)
1685    else:
1686        kmax = searchsorted(latitudes, maxlat)
1687
1688    if minlon == None:
1689        lmin=0
1690    else:
1691        lmin = searchsorted(longitudes, minlon)
1692
1693    if maxlon == None:
1694        lmax = len(longitudes)
1695    else:
1696        lmax = searchsorted(longitudes, maxlon)
1697
1698
1699
1700    times = times[jmin:jmax]
1701    latitudes = latitudes[kmin:kmax]
1702    longitudes = longitudes[lmin:lmax]
1703
1704
1705    if verbose: print 'cropping'
1706    zname = 'ELEVATION'
1707
1708   
1709    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
1710    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
1711    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
1712    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
1713
1714#    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
1715#        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
1716#    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
1717#        from Numeric import asarray
1718#        elevations=elevations.tolist()
1719#        elevations.reverse()
1720#        elevations=asarray(elevations)
1721#    else:
1722#        from Numeric import asarray
1723#        elevations=elevations.tolist()
1724#        elevations.reverse()
1725#        elevations=asarray(elevations)
1726#        'print hmmm'
1727
1728
1729
1730    #Get missing values
1731    nan_ha = file_h.variables['HA'].missing_value[0]
1732    nan_ua = file_u.variables['UA'].missing_value[0]
1733    nan_va = file_v.variables['VA'].missing_value[0]
1734    if hasattr(file_e.variables[zname],'missing_value'):
1735        nan_e  = file_e.variables[zname].missing_value[0]
1736    else:
1737        nan_e = None
1738
1739    #Cleanup
1740    from Numeric import sometrue
1741
1742    missing = (amplitudes == nan_ha)
1743    if sometrue (missing):
1744        if fail_on_NaN:
1745            msg = 'NetCDFFile %s contains missing values'\
1746                  %(basename_in+'_ha.nc')
1747            raise msg
1748        else:
1749            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
1750
1751    missing = (uspeed == nan_ua)
1752    if sometrue (missing):
1753        if fail_on_NaN:
1754            msg = 'NetCDFFile %s contains missing values'\
1755                  %(basename_in+'_ua.nc')
1756            raise msg
1757        else:
1758            uspeed = uspeed*(missing==0) + missing*NaN_filler
1759
1760    missing = (vspeed == nan_va)
1761    if sometrue (missing):
1762        if fail_on_NaN:
1763            msg = 'NetCDFFile %s contains missing values'\
1764                  %(basename_in+'_va.nc')
1765            raise msg
1766        else:
1767            vspeed = vspeed*(missing==0) + missing*NaN_filler
1768
1769
1770    missing = (elevations == nan_e)
1771    if sometrue (missing):
1772        if fail_on_NaN:
1773            msg = 'NetCDFFile %s contains missing values'\
1774                  %(basename_in+'_e.nc')
1775            raise msg
1776        else:
1777            elevations = elevations*(missing==0) + missing*NaN_filler
1778
1779    #######
1780
1781
1782
1783    number_of_times = times.shape[0]
1784    number_of_latitudes = latitudes.shape[0]
1785    number_of_longitudes = longitudes.shape[0]
1786
1787    assert amplitudes.shape[0] == number_of_times
1788    assert amplitudes.shape[1] == number_of_latitudes
1789    assert amplitudes.shape[2] == number_of_longitudes
1790
1791    if verbose:
1792        print '------------------------------------------------'
1793        print 'Statistics:'
1794        print '  Extent (lat/lon):'
1795        print '    lat in [%f, %f], len(lat) == %d'\
1796              %(min(latitudes.flat), max(latitudes.flat),
1797                len(latitudes.flat))
1798        print '    lon in [%f, %f], len(lon) == %d'\
1799              %(min(longitudes.flat), max(longitudes.flat),
1800                len(longitudes.flat))
1801        print '    t in [%f, %f], len(t) == %d'\
1802              %(min(times.flat), max(times.flat), len(times.flat))
1803
1804        q = amplitudes.flat
1805        name = 'Amplitudes (ha) [cm]'
1806        print %s in [%f, %f]' %(name, min(q), max(q))
1807
1808        q = uspeed.flat
1809        name = 'Speeds (ua) [cm/s]'
1810        print %s in [%f, %f]' %(name, min(q), max(q))
1811
1812        q = vspeed.flat
1813        name = 'Speeds (va) [cm/s]'
1814        print %s in [%f, %f]' %(name, min(q), max(q))
1815
1816        q = elevations.flat
1817        name = 'Elevations (e) [m]'
1818        print %s in [%f, %f]' %(name, min(q), max(q))
1819
1820
1821    #print number_of_latitudes, number_of_longitudes
1822    number_of_points = number_of_latitudes*number_of_longitudes
1823    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
1824
1825
1826    file_h.close()
1827    file_u.close()
1828    file_v.close()
1829    file_e.close()
1830
1831
1832    # NetCDF file definition
1833    outfile = NetCDFFile(swwname, 'w')
1834
1835    #Create new file
1836    outfile.institution = 'Geoscience Australia'
1837    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
1838                          %(basename_in + '_ha.nc',
1839                            basename_in + '_ua.nc',
1840                            basename_in + '_va.nc',
1841                            basename_in + '_e.nc')
1842
1843
1844    #For sww compatibility
1845    outfile.smoothing = 'Yes'
1846    outfile.order = 1
1847
1848    #Start time in seconds since the epoch (midnight 1/1/1970)
1849    outfile.starttime = starttime = times[0]
1850    times = times - starttime  #Store relative times
1851
1852    # dimension definitions
1853    outfile.createDimension('number_of_volumes', number_of_volumes)
1854
1855    outfile.createDimension('number_of_vertices', 3)
1856    outfile.createDimension('number_of_points', number_of_points)
1857
1858
1859    #outfile.createDimension('number_of_timesteps', len(times))
1860    outfile.createDimension('number_of_timesteps', len(times))
1861
1862    # variable definitions
1863    outfile.createVariable('x', precision, ('number_of_points',))
1864    outfile.createVariable('y', precision, ('number_of_points',))
1865    outfile.createVariable('elevation', precision, ('number_of_points',))
1866
1867    #FIXME: Backwards compatibility
1868    outfile.createVariable('z', precision, ('number_of_points',))
1869    #################################
1870
1871    outfile.createVariable('volumes', Int, ('number_of_volumes',
1872                                            'number_of_vertices'))
1873
1874    outfile.createVariable('time', precision,
1875                           ('number_of_timesteps',))
1876
1877    outfile.createVariable('stage', precision,
1878                           ('number_of_timesteps',
1879                            'number_of_points'))
1880
1881    outfile.createVariable('xmomentum', precision,
1882                           ('number_of_timesteps',
1883                            'number_of_points'))
1884
1885    outfile.createVariable('ymomentum', precision,
1886                           ('number_of_timesteps',
1887                            'number_of_points'))
1888
1889
1890    #Store
1891    from coordinate_transforms.redfearn import redfearn
1892    x = zeros(number_of_points, Float)  #Easting
1893    y = zeros(number_of_points, Float)  #Northing
1894
1895
1896    if verbose: print 'Making triangular grid'
1897    #Check zone boundaries
1898    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
1899
1900    vertices = {}
1901    i = 0
1902    for k, lat in enumerate(latitudes):       #Y direction
1903        for l, lon in enumerate(longitudes):  #X direction
1904
1905            vertices[l,k] = i
1906
1907            zone, easting, northing = redfearn(lat,lon)
1908
1909            msg = 'Zone boundary crossed at longitude =', lon
1910            #assert zone == refzone, msg
1911            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
1912            x[i] = easting
1913            y[i] = northing
1914            i += 1
1915
1916
1917    #Construct 2 triangles per 'rectangular' element
1918    volumes = []
1919    for l in range(number_of_longitudes-1):    #X direction
1920        for k in range(number_of_latitudes-1): #Y direction
1921            v1 = vertices[l,k+1]
1922            v2 = vertices[l,k]
1923            v3 = vertices[l+1,k+1]
1924            v4 = vertices[l+1,k]
1925
1926            volumes.append([v1,v2,v3]) #Upper element
1927            volumes.append([v4,v3,v2]) #Lower element
1928
1929    volumes = array(volumes)
1930
1931    if origin == None:
1932        zone = refzone
1933        xllcorner = min(x)
1934        yllcorner = min(y)
1935    else:
1936        zone = origin[0]
1937        xllcorner = origin[1]
1938        yllcorner = origin[2]
1939
1940
1941    outfile.xllcorner = xllcorner
1942    outfile.yllcorner = yllcorner
1943    outfile.zone = zone
1944
1945
1946    if elevation is not None:
1947        z = elevation
1948    else:
1949        if inverted_bathymetry:
1950            z = -1*elevations
1951        else:
1952            z = elevations
1953        #FIXME: z should be obtained from MOST and passed in here
1954
1955    from Numeric import resize
1956    z = resize(z,outfile.variables['z'][:].shape)
1957    outfile.variables['x'][:] = x - xllcorner
1958    outfile.variables['y'][:] = y - yllcorner
1959    outfile.variables['z'][:] = z
1960    outfile.variables['elevation'][:] = z  #FIXME HACK
1961    outfile.variables['time'][:] = times   #Store time relative
1962    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
1963
1964
1965
1966    #Time stepping
1967    stage = outfile.variables['stage']
1968    xmomentum = outfile.variables['xmomentum']
1969    ymomentum = outfile.variables['ymomentum']
1970
1971    if verbose: print 'Converting quantities'
1972    n = len(times)
1973    for j in range(n):
1974        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
1975        i = 0
1976        for k in range(number_of_latitudes):      #Y direction
1977            for l in range(number_of_longitudes): #X direction
1978                w = zscale*amplitudes[j,k,l]/100 + mean_stage
1979                stage[j,i] = w
1980                h = w - z[i]
1981                xmomentum[j,i] = uspeed[j,k,l]/100*h
1982                ymomentum[j,i] = vspeed[j,k,l]/100*h
1983                i += 1
1984
1985
1986    if verbose:
1987        x = outfile.variables['x'][:]
1988        y = outfile.variables['y'][:]
1989        print '------------------------------------------------'
1990        print 'Statistics of output file:'
1991        print '  Name: %s' %swwname
1992        print '  Reference:'
1993        print '    Lower left corner: [%f, %f]'\
1994              %(xllcorner, yllcorner)
1995        print '    Start time: %f' %starttime
1996        print '  Extent:'
1997        print '    x [m] in [%f, %f], len(x) == %d'\
1998              %(min(x.flat), max(x.flat), len(x.flat))
1999        print '    y [m] in [%f, %f], len(y) == %d'\
2000              %(min(y.flat), max(y.flat), len(y.flat))
2001        print '    t [s] in [%f, %f], len(t) == %d'\
2002              %(min(times), max(times), len(times))
2003        print '  Quantities [SI units]:'
2004        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2005            q = outfile.variables[name][:].flat
2006            print '    %s in [%f, %f]' %(name, min(q), max(q))
2007
2008
2009
2010
2011    outfile.close()
2012
2013
2014
2015
def timefile2netcdf(filename, quantity_names = None):
    """Template for converting typical text files with time series to
    NetCDF tms file.


    The file format is assumed to be either two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname with extensions .txt and .tms

    quantity_names: optional list of names for the value columns;
    columns without a supplied name are called 'Attribute0',
    'Attribute1', ...
    """

    import time, calendar
    from Numeric import array, zeros, Float, alltrue
    from config import time_format
    from util import ensure_numeric

    #Read the whole file once; the first line determines the start time
    #and the number of attributes.  (Previously the file was opened and
    #read twice.)
    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    fields = lines[0].split(',')
    #Substitute the filename into the message (the original left the
    #%s placeholder unfilled).
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg

    try:
        starttime = calendar.timegm(time.strptime(fields[0], time_format))
    except ValueError:
        msg = 'First field in file %s must be' %filename
        msg += ' date-time with format %s.\n' %time_format
        msg += 'I got %s instead.' %fields[0]
        raise msg


    #Split values to determine the number of attributes, d
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg


    #Read times proper
    N = len(lines)
    d = len(q)

    T = zeros(N, Float)       #Time (seconds relative to starttime)
    Q = zeros((N, d), Float)  #Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        realtime = calendar.timegm(time.strptime(fields[0], time_format))

        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonuosly ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg


    #Create NetCDF file
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(filename + '.tms', 'w')


    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'


    #Reference point
    #Start time in seconds since the epoch (midnight 1/1/1970)
    #FIXME: Use Georef
    fid.starttime = starttime


    # dimension definitions
    #fid.createDimension('number_of_volumes', self.number_of_volumes)
    #fid.createDimension('number_of_vertices', 3)


    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', Float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        #Use the supplied name when available; fall back to a generated
        #name when quantity_names is None or too short.
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            name = 'Attribute%d'%i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()
2136
2137
def extent_sww(file_name):
    """Return the spatial extent and stage range of an sww file.

    Input:
    file_name - the sww file

    Output:
    A list [xmin, xmax, ymin, ymax, stage_min, stage_max] where x and y
    are vertex coordinates and stage spans all stored timesteps.
    """

    from Scientific.IO.NetCDF import NetCDFFile

    #Open the NetCDF file and pull out the arrays we need
    fid = NetCDFFile(file_name, 'r')

    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stages = fid.variables['stage'][:]

    fid.close()

    #Stage is 2D (time x points), hence the .flat view
    return [min(xs), max(xs),
            min(ys), max(ys),
            min(stages.flat), max(stages.flat)]
2172
2173
def sww2domain(filename,boundary=None,t=None,\
               fail_if_NaN=True,NaN_filler=0\
               ,verbose = True,very_verbose = False):
    """Reconstruct a shallow_water Domain object from an sww file.

    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    filename      -- sww file (full name including extension)
    boundary      -- optional boundary dictionary to attach to the domain
    t             -- model time to extract; time-dependent quantities are
                     interpolated linearly between the two nearest stored
                     timesteps (default: last timestep in the file)
    fail_if_NaN   -- raise if a quantity contains the missing_value marker
    NaN_filler    -- substitute for missing data when fail_if_NaN is False
    verbose, very_verbose -- progress/debug printing

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.
    """
    NaN=9.969209968386869e+036
    #initialise NaN.  This default marker is overridden below by each
    #variable's missing_value attribute when present.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
    time = fid.variables['time']       #Timesteps
    if t is None:
        t = time[-1]
    time_interp = get_time_interp(time,t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]             #x-coordinates of vertices
    y = fid.variables['y'][:]             #y-coordinates of vertices
    elevation = fid.variables['elevation']     #Elevation
    stage = fid.variables['stage']     #Water level
    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    coordinates=transpose(asarray([x.tolist(),y.tolist()]))  #Nx2 point array

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    #Classify variables: anything carrying a time dimension is treated
    #as a conserved quantity and interpolated to time t; the rest are
    #static quantities.
    if verbose: print '    getting quantities'
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity]=\
                  interpolated_quantity(fid.variables[quantity][:],time_interp)
        else: other_quantities.append(quantity)

    #Coordinates, connectivity and the time axis are handled separately.
    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
#    From domain.Domain:
#    domain = Domain(coordinates, volumes,\
#                    conserved_quantities = conserved_quantities,\
#                    other_quantities = other_quantities,zone=zone,\
#                    xllcorner=xllcorner, yllcorner=yllcorner)

#   From shallow_water.Domain:
    coordinates=coordinates.tolist()
    volumes=volumes.tolist()
    #FIXME:should this be in mesh?(peter row)
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        #Non-smoothed sww stores 3 vertices per triangle, so shared
        #corners appear multiple times; merge the duplicates.
        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)


    domain = Domain(coordinates, volumes, boundary)

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    domain.starttime=float(starttime)+float(t)
    domain.time=0.0

    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                #Mask out missing values and substitute NaN_filler.
                #(<> is the old Python 2 spelling of !=)
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Regroup flat per-vertex values as one row of 3 per triangle
            #(Python 2 integer division).
            X = resize(X,(len(X)/3,3))
        domain.set_quantity(quantity,X)
#
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            X = resize(X,(X.shape[0]/3,3))
        domain.set_quantity(quantity,X)
    fid.close()
    return domain
2316
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a time-indexed quantity linearly in time.

    time_interp is an (index, ratio) pair as produced by
    get_time_interp.  Returns the stored value at `index` when ratio is
    zero, otherwise the linear blend of timesteps index and index+1.
    """
    idx, frac = time_interp
    if frac > 0:
        #Linear blend between the two bracketing timesteps
        return (1-frac)*saved_quantity[idx] + frac*saved_quantity[idx+1]
    #Exactly on a stored timestep - no interpolation needed
    return saved_quantity[idx]
2328
def get_time_interp(time, t=None):
    """Find the (index, ratio) pair locating model time t within the
    stored time vector, for use by interpolated_quantity.

    With t omitted, the last stored timestep is selected.  Raises a
    (string) exception when t lies outside [time[0], time[-1]].
    Borrowed from previous pyvolution code.
    """
    if t is None:
        #Default to the final timestep - no interpolation
        return (-1, 0.)

    msg = 'Time interval derived from file %s [%s:%s]'\
        %('FIXMEfilename', time[0], time[-1])
    msg += ' does not match model time: %s' %t
    if t < time[0]: raise msg
    if t > time[-1]: raise msg

    #Walk forwards past t, then back to the bracketing index
    index = 0
    while t > time[index]: index += 1
    while t < time[index]: index -= 1

    if t == time[index]:
        #Protect against case where t == time[-1] (last time)
        # - also works in general when t == time[i]
        ratio = 0
    else:
        #t is now between index and index+1
        ratio = (t - time[index])/(time[index+1] - time[index])
    return (index, ratio)
2355
2356
def weed(coordinates, volumes, boundary = None):
    """Remove duplicate vertices from a mesh.

    coordinates -- list (or Numeric array) of [x, y] vertex coordinates
    volumes     -- list (or Numeric array) of index triples; updated in
                   place to reference the weeded coordinate list
    boundary    -- optional dict mapping (point0, point1) segments to
                   labels; remapped to the new point indices

    Returns (coordinates, volumes, boundary) with each distinct point
    appearing exactly once.
    """
    #The original test ``type(coordinates)=='array'`` compared a type
    #object with a string and was therefore always false, so Numeric
    #arrays were never converted.  Use tolist() when available instead.
    if hasattr(coordinates, 'tolist'):
        coordinates = coordinates.tolist()
    if hasattr(volumes, 'tolist'):
        volumes = volumes.tolist()

    #Map each distinct point to its first occurrence and record, for
    #every original index, which point it refers to.
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point not in point_dict:
            point_dict[point] = i
        same_point[i] = point

    #Rebuild the coordinate list without duplicates and renumber points
    coordinates = []
    i = 0
    for point in point_dict.keys():
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    #Re-point every volume at the weeded indices (negative entries are
    #sentinels and left untouched)
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    if boundary is not None:
        new_boundary = {}
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond') (peter row)

            if (point0, point1) in new_boundary:
                new_boundary[(point0,point1)] = new_boundary[(point0,point1)]#\
                                                #+','+label
            elif (point1, point0) in new_boundary:
                new_boundary[(point1,point0)] = new_boundary[(point1,point0)]#\
                                                #+','+label
            else:
                new_boundary[(point0,point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
2412
2413
2414
2415
2416
2417
2418
2419
2420
2421
2422###############################################
2423#OBSOLETE STUFF
2424#Native checkpoint format.
2425#Information needed to recreate a state is preserved
2426#FIXME: Rethink and maybe use netcdf format
def cpt_variable_writer(filename, t, v0, v1, v2):
    """Store all conserved quantities to file

    Writes one line per triangle: the N values of v0, then v1, then v2,
    each as '%.16e '.
    """

    M, N = v0.shape

    #Filename encodes base name, extension, size and time
    FN = create_filename(filename, 'cpt', M, t)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        for quantity in (v0, v1, v2):
            for j in range(N):
                fid.write('%.16e ' %quantity[i,j])

        fid.write('\n')
    fid.close()
2447
2448
def cpt_variable_reader(filename, t, v0, v1, v2):
    """Read all conserved quantities back from a checkpoint file.

    Counterpart of cpt_variable_writer: each line holds N values for
    each of v0, v1 and v2.  The arrays are filled in place.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Reading from %s' %FN

    fid = open(FN)


    for i in range(M):
        values = fid.readline().split() #Get one line

        for j in range(N):
            #Offsets were hardwired to 3 and 6, which only matched the
            #writer's layout when N == 3; use N and 2*N instead.
            v0[i,j] = float(values[j])
            v1[i,j] = float(values[N+j])
            v2[i,j] = float(values[2*N+j])

    fid.close()
2470
2471def cpt_constant_writer(filename, X0, X1, X2, v0, v1, v2):
2472    """Writes x,y,z,z,z coordinates of triangles constituting the bed
2473    elevation.
2474    FIXME: Not in use pt
2475    """
2476
2477    M, N = v0.shape
2478
2479
2480    print X0
2481    import sys; sys.exit()
2482    FN = create_filename(filename, 'cpt', M)
2483    print 'Writing to %s' %FN
2484
2485    fid = open(FN, 'w')
2486    for i in range(M):
2487        for j in range(2):
2488            fid.write('%.16e ' %X0[i,j])   #x, y
2489        for j in range(N):
2490            fid.write('%.16e ' %v0[i,j])       #z,z,z,
2491
2492        for j in range(2):
2493            fid.write('%.16e ' %X1[i,j])   #x, y
2494        for j in range(N):
2495            fid.write('%.16e ' %v1[i,j])
2496
2497        for j in range(2):
2498            fid.write('%.16e ' %X2[i,j])   #x, y
2499        for j in range(N):
2500            fid.write('%.16e ' %v2[i,j])
2501
2502        fid.write('\n')
2503    fid.close()
2504
2505
2506
2507#Function for storing out to e.g. visualisation
2508#FIXME: Do we want this?
2509#FIXME: Not done yet for this version
def dat_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z coordinates of triangles constituting the bed elevation.

    One line per triangle: x y z for each of the three vertices,
    formatted with '%f '.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        #For each vertex write its x, y and the first z value
        for X, v in ((X0, v0), (X1, v1), (X2, v2)):
            fid.write('%f ' %X[i,0])   #x
            fid.write('%f ' %X[i,1])   #y
            fid.write('%f ' %v[i,0])   #z

        fid.write('\n')
    fid.close()
2535
2536
2537
def dat_variable_writer(filename, t, v0, v1, v2):
    """Store water height to file

    Writes one line per triangle holding the first component of v0, v1
    and v2, each formatted as '%.4f '.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M, t)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        fid.write('%.4f %.4f %.4f \n' %(v0[i,0], v1[i,0], v2[i,0]))
    fid.close()
2555
2556
def read_sww(filename):
    """Read sww Net CDF file containing Shallow Water Wave simulation

    The integer array volumes is of shape Nx3 where N is the number of
    triangles in the mesh.

    Each entry in volumes is an index into the x,y arrays (the location).

    Quantities stage, elevation, xmomentum and ymomentum are all in arrays of dimensions
    number_of_timesteps, number_of_points.

    The momentum is not always stored.

    NOTE(review): this function only obtains handles to the NetCDF
    variables; it returns nothing and never closes the file.  It appears
    unfinished/obsolete -- confirm before relying on it.
    """
    from Scientific.IO.NetCDF import NetCDFFile
    print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
#latitude, longitude
    # Get the variables as Numeric arrays
    x = fid.variables['x']             #x-coordinates of vertices
    y = fid.variables['y']             #y-coordinates of vertices
    z = fid.variables['elevation']     #Elevation
    time = fid.variables['time']       #Timesteps
    stage = fid.variables['stage']     #Water level
    #xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    #ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    volumes = fid.variables['volumes'] #Connectivity
2585
2586
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.

    basename_in:  root name of the input .dem file
    stencil:      2D weight array; each output cell is the weighted sum
                  of the input window under the stencil
    cellsize_new: target cellsize -- assumed to be an integer multiple of
                  the input cellsize (TODO confirm: the ratio is
                  truncated with int())
    basename_out: root name of the output file
                  (default: basename_in + '_' + repr(cellsize_new))
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata.  Scalar attributes come back as rank-1 arrays from
    #this NetCDF library, hence the [0] indexing.
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out == None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    #(Python 2 integer division throughout)
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                           'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    #Shift the corner so it refers to the centre of the first full
    #stencil window
    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    #View the flat input elevation as a (nrows, ncols) grid
    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data, one decimated row at a time
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev =  zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
2713
def sww2ers(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            verbose = False,
            origin = None,
            datum = 'WGS84'):

    """Read SWW file, interpolate a quantity onto a regular grid and
    write the grid as an ER Mapper (.ers) file.

    NOTE(review): the example below describes the ASCII .asc layout the
    gridding was originally designed around; the actual output here is
    written via ermapper_grids.  The timestep parameter is currently
    unused -- confirm intended behaviour.

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    if quantity is given, out values from quantity otherwise default to
    elevation

    if timestep (an index) is given, output quantity at that timestep

    if reduction is given use that to reduce quantity over all timesteps.

    """
    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
    import ermapper_grids

    header = {}
    false_easting = 500000
    false_northing = 10000000
    NODATA_value = 0

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option
    ersfile = basename_out
##    prjfile = basename_out + '.prj'


    if verbose: print 'Reading from %s' %swwfile
    #Read sww file
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    ymin = min(y); ymax = max(y)
    xmin = min(x); xmax = max(x)

    #number_of_timesteps is read but not currently used
    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        #Caller-supplied (zone, xllcorner, yllcorner) triple
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]


    #Get quantity and reduce if applicable
    if verbose: print 'Reading quantity %s' %quantity

    if quantity.lower() == 'depth':
        #Depth is derived rather than stored
        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
    else:
        q = fid.variables[quantity][:]


    if len(q.shape) == 2:
        #Time-dependent quantity: collapse the time axis per point
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    #Now q has dimension: number_of_points

    #Create grid and update xll/yll corner and x,y
    if verbose: print 'Creating grid'
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1

    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    #Re-express vertex coordinates relative to the new corner
    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2


    from Numeric import zeros, Float
    grid_points = zeros ( (ncols*nrows, 2), Float )


    for i in xrange(nrows):
##        yg = i*cellsize
        yg = (nrows-i)*cellsize # this will flip the order of the y values
        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

##            print k, xg, yg
            grid_points[k,0] = xg
            grid_points[k,1] = yg

    #Interpolate
    from least_squares import Interpolation
    from util import inside_polygon

    #FIXME: This should be done with precrop = True, otherwise it'll
    #take forever. With expand_search  set to False, some grid points might
    #miss out....

    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
                           precrop = False, expand_search = True,
                           verbose = verbose)

    #Interpolate using quantity values
    if verbose: print 'Interpolating'
    grid_values = interp.interpolate(q).flat
    grid_values = reshape(grid_values,(nrows, ncols))


    # setup header information
    header['datum'] = '"' + datum + '"'
    # FIXME The use of hardwired UTM and zone number needs to be made optional
    # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
    header['projection'] = '"UTM-' + str(zone) + '"'
    header['coordinatetype'] = 'EN'
    #coordinatetype is hardwired to 'EN' just above, so the 'LL' branch
    #below is currently dead code (kept for when the FIXME is addressed)
    if header['coordinatetype'] == 'LL':
        header['longitude'] = str(newxllcorner)
        header['latitude'] = str(newyllcorner)
    elif header['coordinatetype'] == 'EN':
        header['eastings'] = str(newxllcorner)
        header['northings'] = str(newyllcorner)
    header['nullcellvalue'] = str(NODATA_value)
    header['xdimension'] = str(cellsize)
    header['ydimension'] = str(cellsize)
    header['value'] = '"' + quantity + '"'


    #Write
    if verbose: print 'Writing %s' %ersfile
    ermapper_grids.write_ermapper_grid(ersfile,grid_values, header)

    fid.close()
2907
2908##    ascid = open(ascfile, 'w')
2909##
2910##    ascid.write('ncols         %d\n' %ncols)
2911##    ascid.write('nrows         %d\n' %nrows)
2912##    ascid.write('xllcorner     %d\n' %newxllcorner)
2913##    ascid.write('yllcorner     %d\n' %newyllcorner)
2914##    ascid.write('cellsize      %f\n' %cellsize)
2915##    ascid.write('NODATA_value  %d\n' %NODATA_value)
2916##
2917##
2918##    #Get bounding polygon from mesh
2919##    P = interp.mesh.get_boundary_polygon()
2920##    inside_indices = inside_polygon(grid_points, P)
2921##
2922##    for i in range(nrows):
2923##        if verbose and i%((nrows+10)/10)==0:
2924##            print 'Doing row %d of %d' %(i, nrows)
2925##
2926##        for j in range(ncols):
2927##            index = (nrows-i-1)*ncols+j
2928##
2929##            if sometrue(inside_indices == index):
2930##                ascid.write('%f ' %grid_values[index])
2931##            else:
2932##                ascid.write('%d ' %NODATA_value)
2933##
2934##        ascid.write('\n')
2935##
2936##    #Close
2937##    ascid.close()
2938##    fid.close()
# Note: See TracBrowser for help on using the repository browser.