source: inundation/pyvolution/data_manager.py @ 1833

Last change on this file since 1833 was 1833, checked in by tdhu, 20 years ago

Testing is as yet incomplete. Test_data_manager is currently running data_manager to import an sww to an ERMapper grid.

File size: 84.1 KB
Line 
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18
19.dem: NetCDF representation of regular DEM data
20
21.tsh: ASCII format for storing meshes and associated boundary and region info
22.msh: NetCDF format for storing meshes and associated boundary and region info
23
24.nc: Native ferret NetCDF format
25.geo: Houdini's ASCII geometry format (?)
26
27
28A typical dataflow can be described as follows
29
30Manually created files:
31ASC, PRJ:     Digital elevation models (gridded)
32TSH:          Triangular meshes (e.g. created from pmesh)
33NC            Model outputs for use as boundary conditions (e.g from MOST)
34
35
36AUTOMATICALLY CREATED FILES:
37
38ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
39
40NC -> SWW: Conversion of MOST boundary files to boundary sww
41
42PTS + TSH -> TSH with elevation: Least squares fit
43
44TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
45
46TSH + Boundary SWW -> SWW: Simulation using pyvolution
47
48"""
49
50from Numeric import concatenate
51
52from coordinate_transforms.geo_reference import Geo_reference
53
def make_filename(s):
    """Transform argument string into a suitable filename.

    Strips surrounding whitespace, replaces blanks by underscores,
    removes parentheses and finally collapses double underscores once.
    """

    cleaned = s.strip().replace(' ', '_')
    for bracket in ('(', ')'):
        cleaned = cleaned.replace(bracket, '')

    return cleaned.replace('__', '_')
65
66
67def check_dir(path, verbose=None):
68    """Check that specified path exists.
69    If path does not exist it will be created if possible
70
71    USAGE:
72       checkdir(path, verbose):
73
74    ARGUMENTS:
75        path -- Directory
76        verbose -- Flag verbose output (default: None)
77
78    RETURN VALUE:
79        Verified path including trailing separator
80
81    """
82
83    import os, sys
84    import os.path
85
86    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
87        unix = 0
88    else:
89        unix = 1
90
91
92    if path[-1] != os.sep:
93        path = path + os.sep  # Add separator for directories
94
95    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
96    if not (os.access(path,os.R_OK and os.W_OK) or path == ''):
97        try:
98            exitcode=os.mkdir(path)
99
100            # Change access rights if possible
101            #
102            if unix:
103                exitcode=os.system('chmod 775 '+path)
104            else:
105                pass  # FIXME: What about acces rights under Windows?
106
107            if verbose: print 'MESSAGE: Directory', path, 'created.'
108
109        except:
110            print 'WARNING: Directory', path, 'could not be created.'
111            if unix:
112                path = '/tmp/'
113            else:
114                path = 'C:'
115
116            print 'Using directory %s instead' %path
117
118    return(path)
119
120
121
def del_dir(path):
    """Recursively delete directory path and all its contents

    Silently does nothing if path is not a directory.  Files that
    cannot be removed are reported and skipped (in which case the
    final rmdir of the then non-empty directory will raise).
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                # Real subdirectory - recurse (links are removed, not followed)
                del_dir(X)
            else:
                # Plain file or symbolic link - remove it
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)
142
143
144
def create_filename(datadir, filename, format, size=None, time=None):
    """Build a data file name of the form

        <datadir>/<filename>[_size<size>][_time<time>].<format>

    datadir is passed through check_dir, so it is created if needed
    and always carries a trailing separator.
    """

    import os

    name = check_dir(datadir) + filename

    if size is not None:
        name = name + '_size%d' %size

    if time is not None:
        name = name + '_time%.2f' %time

    return name + '.' + format
160
161
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format

    Returns the list of files in datadir matching the naming scheme
    used by create_filename for the given size and format.
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)

    # FIX: the pattern must agree with create_filename, which writes
    # '_size%d' (no '=').  check_dir already returns a trailing
    # separator, so no extra os.sep is added.
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)
175
176
177
#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        """Common bookkeeping for all format writers.

        Derives the output filename from the domain's data directory
        and name plus the given extension, and records the domain.
        mode must be 'r', 'w' or 'a'.
        """
        valid_modes = ['r', 'w', 'a']
        assert mode in valid_modes, '''Mode %s must be either:''' %mode +\
                                    '''   'w' (write)'''+\
                                    '''   'r' (read)''' +\
                                    '''   'a' (append)'''

        self.domain = domain
        self.number_of_volumes = len(domain)
        self.timestep = 0

        #Filename of the form <datadir>/<name>.<extension>
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        #FIXME: Should we have a general set_precision function?
205
206
207
#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion=False):
        # max_size: upper bound (bytes) on the file before output is
        # continued in a new, time-stamped file (see store_timestep).
        # recursion: depth in that file-splitting chain; False/0 for
        # the first file.
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        # Sets self.filename, self.number_of_volumes, self.domain
        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            # With smoothing, shared vertices are stored once; otherwise
            # each triangle keeps its own three points.
            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        # The same elevation data is duplicated into the legacy 'z'
        # variable so older readers keep working.
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file

        names is a single quantity name or a list/tuple of names
        ('stage', 'xmomentum', 'ymomentum').  If appending would push
        the file past max_size, output continues in a fresh, time-
        stamped sww file written by a new Data_format_sww instance.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        minimum_allowed_depth = 0.001
        #minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        # Retry for up to ~10 seconds in case a reader holds the file.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise msg



        #Check to see if the file is already too big:
        # Estimate the size of one more timestep as the average size
        # of the timesteps written so far.
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase =  file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domian.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break swollens reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    # Clamp stage to the bed where depth is below the
                    # minimum allowed depth (avoids negative depths).
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()
478
479
480
481#Class for handling checkpoints data
482class Data_format_cpt(Data_format):
483    """Interface to native NetCDF format (.cpt)
484    """
485
486
487    def __init__(self, domain, mode = 'w'):
488        from Scientific.IO.NetCDF import NetCDFFile
489        from Numeric import Int, Float, Float
490
491        self.precision = Float #Use full precision
492
493        Data_format.__init__(self, domain, 'sww', mode)
494
495
496        # NetCDF file definition
497        fid = NetCDFFile(self.filename, mode)
498
499        if mode == 'w':
500            #Create new file
501            fid.institution = 'Geoscience Australia'
502            fid.description = 'Checkpoint data'
503            #fid.smooth = domain.smooth
504            fid.order = domain.default_order
505
506            # dimension definitions
507            fid.createDimension('number_of_volumes', self.number_of_volumes)
508            fid.createDimension('number_of_vertices', 3)
509
510            #Store info at all vertices (no smoothing)
511            fid.createDimension('number_of_points', 3*self.number_of_volumes)
512            fid.createDimension('number_of_timesteps', None) #extensible
513
514            # variable definitions
515
516            #Mesh
517            fid.createVariable('x', self.precision, ('number_of_points',))
518            fid.createVariable('y', self.precision, ('number_of_points',))
519
520
521            fid.createVariable('volumes', Int, ('number_of_volumes',
522                                                'number_of_vertices'))
523
524            fid.createVariable('time', self.precision,
525                               ('number_of_timesteps',))
526
527            #Allocate space for all quantities
528            for name in domain.quantities.keys():
529                fid.createVariable(name, self.precision,
530                                   ('number_of_timesteps',
531                                    'number_of_points'))
532
533        #Close
534        fid.close()
535
536
537    def store_checkpoint(self):
538        """
539        Write x,y coordinates of triangles.
540        Write connectivity (
541        constituting
542        the bed elevation.
543        """
544
545        from Scientific.IO.NetCDF import NetCDFFile
546
547        from Numeric import concatenate
548
549        domain = self.domain
550
551        #Get NetCDF
552        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
553
554        # Get the variables
555        x = fid.variables['x']
556        y = fid.variables['y']
557
558        volumes = fid.variables['volumes']
559
560        # Get X, Y and bed elevation Z
561        Q = domain.quantities['elevation']
562        X,Y,Z,V = Q.get_vertex_values(xy=True,
563                                      precision = self.precision)
564
565
566
567        x[:] = X.astype(self.precision)
568        y[:] = Y.astype(self.precision)
569        z[:] = Z.astype(self.precision)
570
571        volumes[:] = V
572
573        #Close
574        fid.close()
575
576
577    def store_timestep(self, name):
578        """Store time and named quantity to file
579        """
580        from Scientific.IO.NetCDF import NetCDFFile
581        from time import sleep
582
583        #Get NetCDF
584        retries = 0
585        file_open = False
586        while not file_open and retries < 10:
587            try:
588                fid = NetCDFFile(self.filename, 'a') #Open existing file
589            except IOError:
590                #This could happen if someone was reading the file.
591                #In that case, wait a while and try again
592                msg = 'Warning (store_timestep): File %s could not be opened'\
593                          %self.filename
594                msg += ' - trying again'
595                print msg
596                retries += 1
597                sleep(1)
598            else:
599                file_open = True
600
601            if not file_open:
602                msg = 'File %s could not be opened for append' %self.filename
603            raise msg
604
605
606        domain = self.domain
607
608        # Get the variables
609        time = fid.variables['time']
610        stage = fid.variables['stage']
611        i = len(time)
612
613        #Store stage
614        time[i] = self.domain.time
615
616        # Get quantity
617        Q = domain.quantities[name]
618        A,V = Q.get_vertex_values(xy=False,
619                                  precision = self.precision)
620
621        stage[i,:] = A.astype(self.precision)
622
623        #Flush and close
624        fid.sync()
625        fid.close()
626
627
628
629
630
#Function for storing xya output
#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Generic interface to data formats
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        # Sets self.filename, self.number_of_volumes, self.domain
        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.

        Output columns are: x, y, bed elevation, stage, friction.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        # Header: <number of points> <number of attributes> plus legend
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        # NOTE: map(None, ...) is the Python 2 zip-longest idiom - it
        # pads with None if the sequences differ in length.
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file

        Not implemented for the xya format.
        """
        pass
702
703
#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number elements.
       triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

       If filename does not already end in '.obj' that extension
       is appended.
    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    # FIX: obj is a text format - open in text mode, not binary ('wb')
    outfile = open(FN, 'w')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    # One 'v' (vertex) record per vertex of every element
    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    # One 'f' (face) record per element; obj vertex indices are 1-based
    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()
742
743
744#########################################################
745#Conversion routines
746########################################################
747
def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj

    basefilename: stem of the sww file (create_filename adds the
    '_size<size>' suffix and extension).
    size: number of volumes; also used to un-flatten the point arrays.

    Writes one obj file for the bathymetry and one per timestep for
    the stage.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    # Un-flatten: point i+j*M holds vertex j of volume i
    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        # Reuse zz to hold the stage surface for this timestep
        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)
800
801
802def dat2obj(basefilename):
803    """Convert line based data output to obj
804    FIXME: Obsolete?
805    """
806
807    import glob, os
808    from config import data_dir
809
810
811    #Get bathymetry and x,y's
812    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()
813
814    from Numeric import zeros, Float
815
816    M = len(lines)  #Number of lines
817    x = zeros((M,3), Float)
818    y = zeros((M,3), Float)
819    z = zeros((M,3), Float)
820
821    ##i = 0
822    for i, line in enumerate(lines):
823        tokens = line.split()
824        values = map(float,tokens)
825
826        for j in range(3):
827            x[i,j] = values[j*3]
828            y[i,j] = values[j*3+1]
829            z[i,j] = values[j*3+2]
830
831        ##i += 1
832
833
834    #Write obj for bathymetry
835    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)
836
837
838    #Now read all the data files with variable information, combine with
839    #x,y info
840    #and store as obj
841
842    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')
843
844    for filename in files:
845        print 'Processing %s' % filename
846
847        lines = open(data_dir+os.sep+filename,'r').readlines()
848        assert len(lines) == M
849        root, ext = os.path.splitext(filename)
850
851        #Get time from filename
852        i0 = filename.find('_time=')
853        if i0 == -1:
854            #Skip bathymetry file
855            continue
856
857        i0 += 6  #Position where time starts
858        i1 = filename.find('.dat')
859
860        if i1 > i0:
861            t = float(filename[i0:i1])
862        else:
863            raise 'Hmmmm'
864
865
866
867        ##i = 0
868        for i, line in enumerate(lines):
869            tokens = line.split()
870            values = map(float,tokens)
871
872            for j in range(3):
873                z[i,j] = values[j]
874
875            ##i += 1
876
877        #Write obj for variable data
878        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
879
880
def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2

    All dimensions and variable definitions are copied; every variable
    except 'time' and 'stage' is copied verbatim, while 'time' and
    'stage' are subsampled to the selected timesteps.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    # Mirror every variable definition (same type and dimensions)
    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
928
929
#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename

    domain.format selects the writer class, e.g. format 'sww' maps
    to Data_format_sww defined in this module.
    """

    # FIX: look the class up by name instead of eval'ing a string -
    # same result for valid formats, no arbitrary code evaluation.
    cls = globals()['Data_format_%s' %domain.format]
    return cls(domain, mode)
937
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field

    stride: only every stride'th line is processed (default: all lines).
    attribute_name: NetCDF variable name used for the third column.
    basename_out: stem of the output .pts file; defaults to the input
    stem.
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    # Default to the .xya extension if none was given
    if ext == '': ext = '.xya'

    #Get NetCDF
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        # Warn about lines without exactly three fields, but still
        # attempt to parse them below
        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        # Skip any line whose first three fields are not all numeric
        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)
1013
1014
def write_ptsfile(ptsname, points, attribute, attribute_name = None):
    """Store point coordinates and one associated attribute in a
    pts (NetCDF) file.

    ptsname         name of the output file (including .pts extension)
    points          sequence of (x, y) coordinate pairs
    attribute       sequence of attribute values, one per point
    attribute_name  name under which the attribute variable is stored;
                    defaults to 'attribute'
    """

    from Numeric import Float
    from Scientific.IO.NetCDF import NetCDFFile
    from coordinate_transforms.geo_reference import Geo_reference

    name = attribute_name
    if name is None:
        name = 'attribute'

    #Open new NetCDF file and attach meta data
    fid = NetCDFFile(ptsname, 'w')
    fid.institution = 'Geoscience Australia'
    fid.description = 'NetCDF pts format for compact and '\
                      'portable storage of spatial point data'

    #Attach a default georeference (zone, offsets)
    Geo_reference().write_NetCDF(fid)

    #Dimensions: N points in two spatial dimensions
    fid.createDimension('number_of_points', len(points))
    fid.createDimension('number_of_dimensions', 2) #This is 2d data

    #Variable definitions
    fid.createVariable('points', Float,
                       ('number_of_points', 'number_of_dimensions'))
    fid.createVariable(name, Float, ('number_of_points',))

    #Copy data across and close
    fid.variables['points'][:, :] = points
    fid.variables[name][:] = attribute

    fid.close()
1059   
1060
1061def dem2pts(basename_in, basename_out=None, verbose=False,
1062            easting_min=None, easting_max=None,
1063            northing_min=None, northing_max=None):
1064    """Read Digitial Elevation model from the following NetCDF format (.dem)
1065
1066    Example:
1067
1068    ncols         3121
1069    nrows         1800
1070    xllcorner     722000
1071    yllcorner     5893000
1072    cellsize      25
1073    NODATA_value  -9999
1074    138.3698 137.4194 136.5062 135.5558 ..........
1075
1076    Convert to NetCDF pts format which is
1077
1078    points:  (Nx2) Float array
1079    elevation: N Float array
1080    """
1081
1082    #FIXME: Can this be written feasibly using write_pts?
1083
1084    import os
1085    from Scientific.IO.NetCDF import NetCDFFile
1086    from Numeric import Float, zeros, reshape
1087
1088    root = basename_in
1089
1090    #Get NetCDF
1091    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read
1092
1093    if verbose: print 'Reading DEM from %s' %(root + '.dem')
1094
1095    ncols = infile.ncols[0]
1096    nrows = infile.nrows[0]
1097    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
1098    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
1099    cellsize = infile.cellsize[0]
1100    NODATA_value = infile.NODATA_value[0]
1101    dem_elevation = infile.variables['elevation']
1102
1103    zone = infile.zone[0]
1104    false_easting = infile.false_easting[0]
1105    false_northing = infile.false_northing[0]
1106
1107    #Text strings
1108    projection = infile.projection
1109    datum = infile.datum
1110    units = infile.units
1111
1112
1113    #Get output file
1114    if basename_out == None:
1115        ptsname = root + '.pts'
1116    else:
1117        ptsname = basename_out + '.pts'
1118
1119    if verbose: print 'Store to NetCDF file %s' %ptsname
1120    # NetCDF file definition
1121    outfile = NetCDFFile(ptsname, 'w')
1122
1123    #Create new file
1124    outfile.institution = 'Geoscience Australia'
1125    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
1126                      'of spatial point data'
1127    #assign default values
1128    if easting_min is None: easting_min = xllcorner
1129    if easting_max is None: easting_max = xllcorner + ncols*cellsize
1130    if northing_min is None: northing_min = yllcorner
1131    if northing_max is None: northing_max = yllcorner + nrows*cellsize
1132
1133    #compute offsets to update georeferencing
1134    easting_offset = xllcorner - easting_min
1135    northing_offset = yllcorner - northing_min
1136
1137    #Georeferencing
1138    outfile.zone = zone
1139    outfile.xllcorner = easting_min #Easting of lower left corner
1140    outfile.yllcorner = northing_min #Northing of lower left corner
1141    outfile.false_easting = false_easting
1142    outfile.false_northing = false_northing
1143
1144    outfile.projection = projection
1145    outfile.datum = datum
1146    outfile.units = units
1147
1148
1149    #Grid info (FIXME: probably not going to be used, but heck)
1150    outfile.ncols = ncols
1151    outfile.nrows = nrows
1152
1153
1154    # dimension definitions
1155    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
1156    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
1157    outfile.createDimension('number_of_points', nrows_in_bounding_box*ncols_in_bounding_box)
1158    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1159
1160    # variable definitions
1161    outfile.createVariable('points', Float, ('number_of_points',
1162                                             'number_of_dimensions'))
1163    outfile.createVariable('elevation', Float, ('number_of_points',))
1164
1165    # Get handles to the variables
1166    points = outfile.variables['points']
1167    elevation = outfile.variables['elevation']
1168
1169    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
1170
1171    #Store data
1172    #FIXME: Could perhaps be faster using array operations (Fixed 27/7/05)
1173    global_index = 0
1174    for i in range(nrows):
1175        if verbose and i%((nrows+10)/10)==0:
1176            print 'Processing row %d of %d' %(i, nrows)       
1177       
1178        lower_index = global_index
1179        tpoints = zeros((ncols_in_bounding_box, 2), Float)
1180        telev =  zeros(ncols_in_bounding_box, Float)
1181        local_index = 0
1182
1183        y = (nrows-i)*cellsize + yllcorner
1184        for j in range(ncols):
1185
1186            x = j*cellsize + xllcorner
1187            if easting_min <= x <= easting_max and \
1188               northing_min <= y <= northing_max:
1189                tpoints[local_index, :] = [x-easting_min,y-northing_min]
1190                telev[local_index] = dem_elevation_r[i, j]
1191                global_index += 1
1192                local_index += 1
1193
1194        upper_index = global_index
1195
1196        if upper_index == lower_index + ncols_in_bounding_box:
1197            points[lower_index:upper_index, :] = tpoints
1198            elevation[lower_index:upper_index] = telev
1199
1200    assert global_index == nrows_in_bounding_box*ncols_in_bounding_box, 'index not equal to number of points'
1201
1202    infile.close()
1203    outfile.close()
1204
1205
def sww2asc(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            verbose = False,
            origin = None):
    """Read SWW file and convert to Digital Elevation Model format (.asc)

    The output follows the ArcView ASCII grid layout, e.g.

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    if quantity is given, output values from that quantity, otherwise
    default to 'elevation'. The special name 'depth' outputs
    stage - elevation.

    if timestep (an index) is given, output quantity at that timestep.
    NOTE(review): timestep is currently accepted but never used below;
    the quantity is always reduced over all timesteps.

    if reduction is given use that to reduce quantity over all timesteps.
    Defaults to max.

    origin, if given, is a (zone, easting, northing) tuple overriding the
    geo reference stored in the sww file.
    """
    from Numeric import array, Float, concatenate, NewAxis, zeros,\
         sometrue


    #FIXME: Should be variable
    datum = 'WGS84'
    false_easting = 500000
    false_northing = 10000000
    NODATA_value = -9999

    if quantity is None:
        quantity = 'elevation'

    #Default reduction: maximum over all timesteps
    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    ascfile = basename_out + '.asc'
    prjfile = basename_out + '.prj'


    if verbose: print 'Reading from %s' %swwfile
    #Read sww file
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    ymin = min(y); ymax = max(y)
    xmin = min(x); xmax = max(x)

    #NOTE(review): number_of_timesteps is read but not used below
    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        #Caller-supplied (zone, easting, northing) overrides the file's georef
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]


    #Get quantity and reduce if applicable
    if verbose: print 'Reading quantity %s' %quantity

    if quantity.lower() == 'depth':
        #Depth is derived: water surface minus bed elevation
        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
    else:
        #Raises KeyError if quantity is not stored in the sww file
        q = fid.variables[quantity][:]


    #Time-dependent quantities have shape (timesteps, points):
    #collapse to a single value per point using the reduction operator
    if len(q.shape) == 2:
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    #Now q has dimension: number_of_points


    #Write prj file (datum, false_easting, false_northing are the
    #hard-coded values from the top of this function)
    if verbose: print 'Writing %s' %prjfile
    prjid = open(prjfile, 'w')
    prjid.write('Projection    %s\n' %'UTM')
    prjid.write('Zone          %d\n' %zone)
    prjid.write('Datum         %s\n' %datum)
    prjid.write('Zunits        NO\n')
    prjid.write('Units         METERS\n')
    prjid.write('Spheroid      %s\n' %datum)
    prjid.write('Xshift        %d\n' %false_easting)
    prjid.write('Yshift        %d\n' %false_northing)
    prjid.write('Parameters\n')
    prjid.close()

    #Create grid and update xll/yll corner and x,y
    #Grid points are expressed relative to (newxllcorner, newyllcorner)
    if verbose: print 'Creating grid'
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1

    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    #Shift mesh coordinates so the grid origin is at (0, 0)
    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2


    #NOTE(review): zeros and Float were already imported at the top
    from Numeric import zeros, Float
    grid_points = zeros ( (ncols*nrows, 2), Float )


    #Enumerate grid points row by row, bottom row first
    for i in xrange(nrows):
        yg = i*cellsize
        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

            #print k, xg, yg
            grid_points[k,0] = xg
            grid_points[k,1] = yg


    #Interpolate

    from least_squares import Interpolation
    from util import inside_polygon

    #FIXME: This should be done with precrop = True, otherwise it'll
    #take forever. With expand_search  set to False, some grid points might
    #miss out....

    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
                           precrop = False, expand_search = False,
                           verbose = verbose)

    #Interpolate using quantity values
    if verbose: print 'Interpolating'
    grid_values = interp.interpolate(q).flat

    #Write
    if verbose: print 'Writing %s' %ascfile
    ascid = open(ascfile, 'w')

    ascid.write('ncols         %d\n' %ncols)
    ascid.write('nrows         %d\n' %nrows)
    ascid.write('xllcorner     %d\n' %newxllcorner)
    ascid.write('yllcorner     %d\n' %newyllcorner)
    ascid.write('cellsize      %f\n' %cellsize)
    ascid.write('NODATA_value  %d\n' %NODATA_value)


    #Get bounding polygon from mesh; grid points outside it get NODATA
    P = interp.mesh.get_boundary_polygon()
    inside_indices = inside_polygon(grid_points, P)

    #ASC rows are written north to south, hence the (nrows-i-1) flip
    for i in range(nrows):
        if verbose and i%((nrows+10)/10)==0:
            print 'Doing row %d of %d' %(i, nrows)

        for j in range(ncols):
            index = (nrows-i-1)*ncols+j

            #Membership test scans inside_indices for every cell
            #(O(len(inside_indices)) per cell)
            if sometrue(inside_indices == index):
                ascid.write('%f ' %grid_values[index])
            else:
                ascid.write('%d ' %NODATA_value)

        ascid.write('\n')

    #Close
    ascid.close()
    fid.close()
1421
1422
1423
1424
1425def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1426                                  verbose=False):
1427    """Read Digitial Elevation model from the following ASCII format (.asc)
1428
1429    Example:
1430
1431    ncols         3121
1432    nrows         1800
1433    xllcorner     722000
1434    yllcorner     5893000
1435    cellsize      25
1436    NODATA_value  -9999
1437    138.3698 137.4194 136.5062 135.5558 ..........
1438
1439    Convert basename_in + '.asc' to NetCDF format (.dem)
1440    mimicking the ASCII format closely.
1441
1442
1443    An accompanying file with same basename_in but extension .prj must exist
1444    and is used to fix the UTM zone, datum, false northings and eastings.
1445
1446    The prj format is assumed to be as
1447
1448    Projection    UTM
1449    Zone          56
1450    Datum         WGS84
1451    Zunits        NO
1452    Units         METERS
1453    Spheroid      WGS84
1454    Xshift        0.0000000000
1455    Yshift        10000000.0000000000
1456    Parameters
1457    """
1458
1459    import os
1460    from Scientific.IO.NetCDF import NetCDFFile
1461    from Numeric import Float, array
1462
1463    #root, ext = os.path.splitext(basename_in)
1464    root = basename_in
1465
1466    ###########################################
1467    # Read Meta data
1468    if verbose: print 'Reading METADATA from %s' %root + '.prj'
1469    metadatafile = open(root + '.prj')
1470    metalines = metadatafile.readlines()
1471    metadatafile.close()
1472
1473    L = metalines[0].strip().split()
1474    assert L[0].strip().lower() == 'projection'
1475    projection = L[1].strip()                   #TEXT
1476
1477    L = metalines[1].strip().split()
1478    assert L[0].strip().lower() == 'zone'
1479    zone = int(L[1].strip())
1480
1481    L = metalines[2].strip().split()
1482    assert L[0].strip().lower() == 'datum'
1483    datum = L[1].strip()                        #TEXT
1484
1485    L = metalines[3].strip().split()
1486    assert L[0].strip().lower() == 'zunits'     #IGNORE
1487    zunits = L[1].strip()                       #TEXT
1488
1489    L = metalines[4].strip().split()
1490    assert L[0].strip().lower() == 'units'
1491    units = L[1].strip()                        #TEXT
1492
1493    L = metalines[5].strip().split()
1494    assert L[0].strip().lower() == 'spheroid'   #IGNORE
1495    spheroid = L[1].strip()                     #TEXT
1496
1497    L = metalines[6].strip().split()
1498    assert L[0].strip().lower() == 'xshift'
1499    false_easting = float(L[1].strip())
1500
1501    L = metalines[7].strip().split()
1502    assert L[0].strip().lower() == 'yshift'
1503    false_northing = float(L[1].strip())
1504
1505    #print false_easting, false_northing, zone, datum
1506
1507
1508    ###########################################
1509    #Read DEM data
1510
1511    datafile = open(basename_in + '.asc')
1512
1513    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
1514    lines = datafile.readlines()
1515    datafile.close()
1516
1517    if verbose: print 'Got', len(lines), ' lines'
1518
1519    ncols = int(lines[0].split()[1].strip())
1520    nrows = int(lines[1].split()[1].strip())
1521    xllcorner = float(lines[2].split()[1].strip())
1522    yllcorner = float(lines[3].split()[1].strip())
1523    cellsize = float(lines[4].split()[1].strip())
1524    NODATA_value = int(lines[5].split()[1].strip())
1525
1526    assert len(lines) == nrows + 6
1527
1528
1529    ##########################################
1530
1531
1532    if basename_out == None:
1533        netcdfname = root + '.dem'
1534    else:
1535        netcdfname = basename_out + '.dem'
1536
1537    if verbose: print 'Store to NetCDF file %s' %netcdfname
1538    # NetCDF file definition
1539    fid = NetCDFFile(netcdfname, 'w')
1540
1541    #Create new file
1542    fid.institution = 'Geoscience Australia'
1543    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
1544                      'of spatial point data'
1545
1546    fid.ncols = ncols
1547    fid.nrows = nrows
1548    fid.xllcorner = xllcorner
1549    fid.yllcorner = yllcorner
1550    fid.cellsize = cellsize
1551    fid.NODATA_value = NODATA_value
1552
1553    fid.zone = zone
1554    fid.false_easting = false_easting
1555    fid.false_northing = false_northing
1556    fid.projection = projection
1557    fid.datum = datum
1558    fid.units = units
1559
1560
1561    # dimension definitions
1562    fid.createDimension('number_of_rows', nrows)
1563    fid.createDimension('number_of_columns', ncols)
1564
1565    # variable definitions
1566    fid.createVariable('elevation', Float, ('number_of_rows',
1567                                            'number_of_columns'))
1568
1569    # Get handles to the variables
1570    elevation = fid.variables['elevation']
1571
1572    #Store data
1573    n = len(lines[6:])
1574    for i, line in enumerate(lines[6:]):
1575        fields = line.split()
1576        if verbose and i%((n+10)/10)==0:
1577            print 'Processing row %d of %d' %(i, nrows)           
1578
1579        elevation[i, :] = array([float(x) for x in fields])
1580
1581    fid.close()
1582
1583
1584
1585
1586
1587def ferret2sww(basename_in, basename_out = None,
1588               verbose = False,
1589               minlat = None, maxlat = None,
1590               minlon = None, maxlon = None,
1591               mint = None, maxt = None, mean_stage = 0,
1592               origin = None, zscale = 1,
1593               fail_on_NaN = True,
1594               NaN_filler = 0,
1595               elevation = None,
1596               inverted_bathymetry = False
1597               ): #FIXME: Bathymetry should be obtained
1598                                  #from MOST somehow.
1599                                  #Alternatively from elsewhere
1600                                  #or, as a last resort,
1601                                  #specified here.
1602                                  #The value of -100 will work
1603                                  #for the Wollongong tsunami
1604                                  #scenario but is very hacky
1605    """Convert 'Ferret' NetCDF format for wave propagation to
1606    sww format native to pyvolution.
1607
1608    Specify only basename_in and read files of the form
1609    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
1610    relative height, x-velocity and y-velocity, respectively.
1611
1612    Also convert latitude and longitude to UTM. All coordinates are
1613    assumed to be given in the GDA94 datum.
1614
1615    min's and max's: If omitted - full extend is used.
1616    To include a value min may equal it, while max must exceed it.
1617    Lat and lon are assuemd to be in decimal degrees
1618
1619    origin is a 3-tuple with geo referenced
1620    UTM coordinates (zone, easting, northing)
1621
1622    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
1623    which means that longitude is the fastest
1624    varying dimension (row major order, so to speak)
1625
1626    ferret2sww uses grid points as vertices in a triangular grid
1627    counting vertices from lower left corner upwards, then right
1628    """
1629
1630    import os
1631    from Scientific.IO.NetCDF import NetCDFFile
1632    from Numeric import Float, Int, Int32, searchsorted, zeros, array
1633    precision = Float
1634
1635
1636    #Get NetCDF data
1637    if verbose: print 'Reading files %s_*.nc' %basename_in
1638    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
1639    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
1640    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
1641    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
1642
1643    if basename_out is None:
1644        swwname = basename_in + '.sww'
1645    else:
1646        swwname = basename_out + '.sww'
1647
1648    times = file_h.variables['TIME']
1649    latitudes = file_h.variables['LAT']
1650    longitudes = file_h.variables['LON']
1651
1652
1653    e_lat = file_e.variables['LAT'][:]
1654
1655
1656    #Check that files are compatible
1657    from Numeric import allclose
1658    assert allclose(latitudes, file_u.variables['LAT'])
1659    assert allclose(latitudes, file_v.variables['LAT'])   
1660    assert allclose(latitudes, file_e.variables['LAT'])
1661
1662    assert allclose(longitudes, file_u.variables['LON'])
1663    assert allclose(longitudes, file_v.variables['LON'])
1664    assert allclose(longitudes, file_e.variables['LON'])   
1665
1666
1667
1668    if mint == None:
1669        jmin = 0
1670    else:
1671        jmin = searchsorted(times, mint)
1672
1673    if maxt == None:
1674        jmax=len(times)
1675    else:
1676        jmax = searchsorted(times, maxt)
1677
1678    if minlat == None:
1679        kmin=0
1680    else:
1681        kmin = searchsorted(latitudes, minlat)
1682
1683    if maxlat == None:
1684        kmax = len(latitudes)
1685    else:
1686        kmax = searchsorted(latitudes, maxlat)
1687
1688    if minlon == None:
1689        lmin=0
1690    else:
1691        lmin = searchsorted(longitudes, minlon)
1692
1693    if maxlon == None:
1694        lmax = len(longitudes)
1695    else:
1696        lmax = searchsorted(longitudes, maxlon)
1697
1698
1699
1700    times = times[jmin:jmax]
1701    latitudes = latitudes[kmin:kmax]
1702    longitudes = longitudes[lmin:lmax]
1703
1704
1705    if verbose: print 'cropping'
1706    zname = 'ELEVATION'
1707
1708   
1709    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
1710    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
1711    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
1712    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
1713
1714#    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
1715#        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
1716#    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
1717#        from Numeric import asarray
1718#        elevations=elevations.tolist()
1719#        elevations.reverse()
1720#        elevations=asarray(elevations)
1721#    else:
1722#        from Numeric import asarray
1723#        elevations=elevations.tolist()
1724#        elevations.reverse()
1725#        elevations=asarray(elevations)
1726#        'print hmmm'
1727
1728
1729
1730    #Get missing values
1731    nan_ha = file_h.variables['HA'].missing_value[0]
1732    nan_ua = file_u.variables['UA'].missing_value[0]
1733    nan_va = file_v.variables['VA'].missing_value[0]
1734    if hasattr(file_e.variables[zname],'missing_value'):
1735        nan_e  = file_e.variables[zname].missing_value[0]
1736    else:
1737        nan_e = None
1738
1739    #Cleanup
1740    from Numeric import sometrue
1741
1742    missing = (amplitudes == nan_ha)
1743    if sometrue (missing):
1744        if fail_on_NaN:
1745            msg = 'NetCDFFile %s contains missing values'\
1746                  %(basename_in+'_ha.nc')
1747            raise msg
1748        else:
1749            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
1750
1751    missing = (uspeed == nan_ua)
1752    if sometrue (missing):
1753        if fail_on_NaN:
1754            msg = 'NetCDFFile %s contains missing values'\
1755                  %(basename_in+'_ua.nc')
1756            raise msg
1757        else:
1758            uspeed = uspeed*(missing==0) + missing*NaN_filler
1759
1760    missing = (vspeed == nan_va)
1761    if sometrue (missing):
1762        if fail_on_NaN:
1763            msg = 'NetCDFFile %s contains missing values'\
1764                  %(basename_in+'_va.nc')
1765            raise msg
1766        else:
1767            vspeed = vspeed*(missing==0) + missing*NaN_filler
1768
1769
1770    missing = (elevations == nan_e)
1771    if sometrue (missing):
1772        if fail_on_NaN:
1773            msg = 'NetCDFFile %s contains missing values'\
1774                  %(basename_in+'_e.nc')
1775            raise msg
1776        else:
1777            elevations = elevations*(missing==0) + missing*NaN_filler
1778
1779    #######
1780
1781
1782
1783    number_of_times = times.shape[0]
1784    number_of_latitudes = latitudes.shape[0]
1785    number_of_longitudes = longitudes.shape[0]
1786
1787    assert amplitudes.shape[0] == number_of_times
1788    assert amplitudes.shape[1] == number_of_latitudes
1789    assert amplitudes.shape[2] == number_of_longitudes
1790
1791    if verbose:
1792        print '------------------------------------------------'
1793        print 'Statistics:'
1794        print '  Extent (lat/lon):'
1795        print '    lat in [%f, %f], len(lat) == %d'\
1796              %(min(latitudes.flat), max(latitudes.flat),
1797                len(latitudes.flat))
1798        print '    lon in [%f, %f], len(lon) == %d'\
1799              %(min(longitudes.flat), max(longitudes.flat),
1800                len(longitudes.flat))
1801        print '    t in [%f, %f], len(t) == %d'\
1802              %(min(times.flat), max(times.flat), len(times.flat))
1803
1804        q = amplitudes.flat
1805        name = 'Amplitudes (ha) [cm]'
1806        print %s in [%f, %f]' %(name, min(q), max(q))
1807
1808        q = uspeed.flat
1809        name = 'Speeds (ua) [cm/s]'
1810        print %s in [%f, %f]' %(name, min(q), max(q))
1811
1812        q = vspeed.flat
1813        name = 'Speeds (va) [cm/s]'
1814        print %s in [%f, %f]' %(name, min(q), max(q))
1815
1816        q = elevations.flat
1817        name = 'Elevations (e) [m]'
1818        print %s in [%f, %f]' %(name, min(q), max(q))
1819
1820
1821    #print number_of_latitudes, number_of_longitudes
1822    number_of_points = number_of_latitudes*number_of_longitudes
1823    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
1824
1825
1826    file_h.close()
1827    file_u.close()
1828    file_v.close()
1829    file_e.close()
1830
1831
1832    # NetCDF file definition
1833    outfile = NetCDFFile(swwname, 'w')
1834
1835    #Create new file
1836    outfile.institution = 'Geoscience Australia'
1837    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
1838                          %(basename_in + '_ha.nc',
1839                            basename_in + '_ua.nc',
1840                            basename_in + '_va.nc',
1841                            basename_in + '_e.nc')
1842
1843
1844    #For sww compatibility
1845    outfile.smoothing = 'Yes'
1846    outfile.order = 1
1847
1848    #Start time in seconds since the epoch (midnight 1/1/1970)
1849    outfile.starttime = starttime = times[0]
1850    times = times - starttime  #Store relative times
1851
1852    # dimension definitions
1853    outfile.createDimension('number_of_volumes', number_of_volumes)
1854
1855    outfile.createDimension('number_of_vertices', 3)
1856    outfile.createDimension('number_of_points', number_of_points)
1857
1858
1859    #outfile.createDimension('number_of_timesteps', len(times))
1860    outfile.createDimension('number_of_timesteps', len(times))
1861
1862    # variable definitions
1863    outfile.createVariable('x', precision, ('number_of_points',))
1864    outfile.createVariable('y', precision, ('number_of_points',))
1865    outfile.createVariable('elevation', precision, ('number_of_points',))
1866
1867    #FIXME: Backwards compatibility
1868    outfile.createVariable('z', precision, ('number_of_points',))
1869    #################################
1870
1871    outfile.createVariable('volumes', Int, ('number_of_volumes',
1872                                            'number_of_vertices'))
1873
1874    outfile.createVariable('time', precision,
1875                           ('number_of_timesteps',))
1876
1877    outfile.createVariable('stage', precision,
1878                           ('number_of_timesteps',
1879                            'number_of_points'))
1880
1881    outfile.createVariable('xmomentum', precision,
1882                           ('number_of_timesteps',
1883                            'number_of_points'))
1884
1885    outfile.createVariable('ymomentum', precision,
1886                           ('number_of_timesteps',
1887                            'number_of_points'))
1888
1889
1890    #Store
1891    from coordinate_transforms.redfearn import redfearn
1892    x = zeros(number_of_points, Float)  #Easting
1893    y = zeros(number_of_points, Float)  #Northing
1894
1895
1896    if verbose: print 'Making triangular grid'
1897    #Check zone boundaries
1898    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
1899
1900    vertices = {}
1901    i = 0
1902    for k, lat in enumerate(latitudes):       #Y direction
1903        for l, lon in enumerate(longitudes):  #X direction
1904
1905            vertices[l,k] = i
1906
1907            zone, easting, northing = redfearn(lat,lon)
1908
1909            msg = 'Zone boundary crossed at longitude =', lon
1910            #assert zone == refzone, msg
1911            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
1912            x[i] = easting
1913            y[i] = northing
1914            i += 1
1915
1916
1917    #Construct 2 triangles per 'rectangular' element
1918    volumes = []
1919    for l in range(number_of_longitudes-1):    #X direction
1920        for k in range(number_of_latitudes-1): #Y direction
1921            v1 = vertices[l,k+1]
1922            v2 = vertices[l,k]
1923            v3 = vertices[l+1,k+1]
1924            v4 = vertices[l+1,k]
1925
1926            volumes.append([v1,v2,v3]) #Upper element
1927            volumes.append([v4,v3,v2]) #Lower element
1928
1929    volumes = array(volumes)
1930
1931    if origin == None:
1932        zone = refzone
1933        xllcorner = min(x)
1934        yllcorner = min(y)
1935    else:
1936        zone = origin[0]
1937        xllcorner = origin[1]
1938        yllcorner = origin[2]
1939
1940
1941    outfile.xllcorner = xllcorner
1942    outfile.yllcorner = yllcorner
1943    outfile.zone = zone
1944
1945
1946    if elevation is not None:
1947        z = elevation
1948    else:
1949        if inverted_bathymetry:
1950            z = -1*elevations
1951        else:
1952            z = elevations
1953        #FIXME: z should be obtained from MOST and passed in here
1954
1955    from Numeric import resize
1956    z = resize(z,outfile.variables['z'][:].shape)
1957    outfile.variables['x'][:] = x - xllcorner
1958    outfile.variables['y'][:] = y - yllcorner
1959    outfile.variables['z'][:] = z
1960    outfile.variables['elevation'][:] = z  #FIXME HACK
1961    outfile.variables['time'][:] = times   #Store time relative
1962    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
1963
1964
1965
1966    #Time stepping
1967    stage = outfile.variables['stage']
1968    xmomentum = outfile.variables['xmomentum']
1969    ymomentum = outfile.variables['ymomentum']
1970
1971    if verbose: print 'Converting quantities'
1972    n = len(times)
1973    for j in range(n):
1974        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
1975        i = 0
1976        for k in range(number_of_latitudes):      #Y direction
1977            for l in range(number_of_longitudes): #X direction
1978                w = zscale*amplitudes[j,k,l]/100 + mean_stage
1979                stage[j,i] = w
1980                h = w - z[i]
1981                xmomentum[j,i] = uspeed[j,k,l]/100*h
1982                ymomentum[j,i] = vspeed[j,k,l]/100*h
1983                i += 1
1984
1985
1986    if verbose:
1987        x = outfile.variables['x'][:]
1988        y = outfile.variables['y'][:]
1989        print '------------------------------------------------'
1990        print 'Statistics of output file:'
1991        print '  Name: %s' %swwname
1992        print '  Reference:'
1993        print '    Lower left corner: [%f, %f]'\
1994              %(xllcorner, yllcorner)
1995        print '    Start time: %f' %starttime
1996        print '  Extent:'
1997        print '    x [m] in [%f, %f], len(x) == %d'\
1998              %(min(x.flat), max(x.flat), len(x.flat))
1999        print '    y [m] in [%f, %f], len(y) == %d'\
2000              %(min(y.flat), max(y.flat), len(y.flat))
2001        print '    t [s] in [%f, %f], len(t) == %d'\
2002              %(min(times), max(times), len(times))
2003        print '  Quantities [SI units]:'
2004        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2005            q = outfile.variables[name][:].flat
2006            print '    %s in [%f, %f]' %(name, min(q), max(q))
2007
2008
2009
2010
2011    outfile.close()
2012
2013
2014
2015
def timefile2swww(filename, quantity_names = None):
    """Convert a text file with time series to a NetCDF sww file.

    The file format is assumed to be two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname with extensions .txt and .sww

    quantity_names - optional list of names for the stored attributes;
    missing or absent names default to 'Attribute<i>'.

    Fixes over previous version:
    - the 'must have the format' message now fills in the filename
      (the %s placeholder was previously left unformatted)
    - raises a proper Exception instead of a string (string exceptions
      are illegal in modern Python)
    """

    import time, calendar
    from Numeric import zeros, Float, alltrue
    from config import time_format
    from util import ensure_numeric


    #Read first line to establish start time and number of attributes
    fid = open(filename + '.txt')
    line = fid.readline()
    fid.close()

    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg

    try:
        starttime = calendar.timegm(time.strptime(fields[0], time_format))
    except ValueError:
        msg = 'First field in file %s must be' %filename
        msg += ' date-time with format %s.\n' %time_format
        msg += 'I got %s instead.' %fields[0]
        raise Exception(msg)


    #Split values on first line to determine number of attributes
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg


    #Read all records, storing times relative to the first record
    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    N = len(lines)
    d = len(q)

    T = zeros(N, Float)       #Time
    Q = zeros((N, d), Float)  #Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        realtime = calendar.timegm(time.strptime(fields[0], time_format))

        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonically ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg


    #Create sww file
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(filename + '.sww', 'w')

    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'


    #Reference point
    #Start time in seconds since the epoch (midnight 1/1/1970)
    #FIXME: Use Georef
    fid.starttime = starttime


    # dimension definitions
    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', Float, ('number_of_timesteps',))
    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            #quantity_names is None or too short - use a generic name
            name = 'Attribute%d'%i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()
2142
2143
def extent_sww(file_name):
    """Determine the spatial extent and stage range of an sww file.

    Input;
    file_name - the sww file

    Output;
    List [xmin, xmax, ymin, ymax, stage_min, stage_max]
    where x and y are the vertex coordinates and stage is taken over
    all stored timesteps.
    """

    from Scientific.IO.NetCDF import NetCDFFile

    #Open NetCDF file and pull out the relevant variables
    fid = NetCDFFile(file_name, 'r')

    eastings = fid.variables['x'][:]
    northings = fid.variables['y'][:]
    water_levels = fid.variables['stage'][:]

    fid.close()

    return [min(eastings), max(eastings),
            min(northings), max(northings),
            min(water_levels.flat), max(water_levels.flat)]
2178
2179
def sww2domain(filename,boundary=None,t=None,\
               fail_if_NaN=True,NaN_filler=0\
               ,verbose = True,very_verbose = False):
    """Reconstruct a shallow_water Domain from an sww file at time t.

    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    filename     - sww (NetCDF) file produced by a previous run
    boundary     - optional boundary dictionary
    t            - model time to extract; time-dependent quantities are
                   linearly interpolated to this time
    fail_if_NaN  - if True, raise when a quantity contains missing-data
                   values; otherwise replace them with NaN_filler
    verbose / very_verbose - diagnostic printing

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.
    """
    NaN=9.969209968386869e+036
    #initialise NaN.
    #Default no-data marker; may be overridden per-variable below by the
    #variable's own missing_value attribute.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
    time = fid.variables['time']       #Timesteps
    if t is None:
        t = time[-1]
    #(index, ratio) pair used to interpolate stored timesteps to time t
    time_interp = get_time_interp(time,t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]             #x-coordinates of vertices
    y = fid.variables['y'][:]             #y-coordinates of vertices
    elevation = fid.variables['elevation']     #Elevation
    stage = fid.variables['stage']     #Water level
    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    #Nx2 array of vertex coordinates
    coordinates=transpose(asarray([x.tolist(),y.tolist()]))

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    #Classify variables: anything with a time dimension is treated as a
    #conserved quantity and interpolated to time t; the rest are static.
    if verbose: print '    getting quantities'
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity]=\
                  interpolated_quantity(fid.variables[quantity][:],time_interp)
        else: other_quantities.append(quantity)

    #Coordinates and connectivity are not quantities
    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
#    From domain.Domain:
#    domain = Domain(coordinates, volumes,\
#                    conserved_quantities = conserved_quantities,\
#                    other_quantities = other_quantities,zone=zone,\
#                    xllcorner=xllcorner, yllcorner=yllcorner)

#   From shallow_water.Domain:
    coordinates=coordinates.tolist()
    volumes=volumes.tolist()
    #FIXME:should this be in mesh?(peter row)
    #Non-smoothed sww files store vertices per-triangle (3 per volume),
    #so duplicated points must be merged ('weeded') first.
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)


    domain = Domain(coordinates, volumes, boundary)

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    #Restart point: new simulation starts where the stored one left off
    domain.starttime=float(starttime)+float(t)
    domain.time=0.0

    #Static quantities (no time dimension)
    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                #NOTE(review): raising a string is deprecated; should be
                #an exception instance
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                #Replace no-data entries with NaN_filler
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Reshape flat per-vertex values back to (n_triangles, 3)
            X = resize(X,(len(X)/3,3))
        domain.set_quantity(quantity,X)
#
    #Time-dependent quantities, already interpolated to time t above
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            X = resize(X,(X.shape[0]/3,3))
        domain.set_quantity(quantity,X)
    fid.close()
    return domain
2322
def interpolated_quantity(saved_quantity,time_interp):
    """Interpolate a stored time-dependent quantity in time.

    time_interp is an (index, ratio) pair as produced by get_time_interp.
    Returns the quantity at the requested time: the record at index when
    ratio is 0, otherwise a linear blend of records index and index+1.
    """
    index, ratio = time_interp
    if ratio <= 0:
        #Exactly on a stored timestep - no blending needed
        return saved_quantity[index]
    #Linear interpolation between the two bracketing timesteps
    return (1 - ratio)*saved_quantity[index] + ratio*saved_quantity[index + 1]
2334
def get_time_interp(time,t=None):
    """Find the (index, ratio) pair for interpolating stored timesteps.

    time - monotonically increasing sequence of stored times
    t    - requested time; defaults to the last stored time

    Returns (index, ratio) such that t lies between time[index] and
    time[index+1] with fractional position ratio (ratio == 0 when t
    coincides with a stored time).

    Raises Exception if t lies outside [time[0], time[-1]].
    (Previously raised a bare string, which is illegal in modern Python.)
    """
    #It is borrowed from previous pyvolution code.
    if t is None:
        t=time[-1]
        index = -1
        ratio = 0.
    else:
        T = time
        tau = t
        index=0
        msg = 'Time interval derived from file %s [%s:%s]'\
            %('FIXMEfilename', T[0], T[-1])
        msg += ' does not match model time: %s' %tau
        if tau < time[0]: raise Exception(msg)
        if tau > time[-1]: raise Exception(msg)
        #Scan for the bracketing interval [time[index], time[index+1]]
        while tau > time[index]: index += 1
        while tau < time[index]: index -= 1
        if tau == time[index]:
            #Protect against case where tau == time[-1] (last time)
            # - also works in general when tau == time[i]
            ratio = 0
        else:
            #t is now between index and index+1
            ratio = (tau - time[index])/(time[index+1] - time[index])
    return (index,ratio)
2361
2362
def weed(coordinates,volumes,boundary = None):
    """Merge coincident points and remap volume/boundary indices.

    coordinates - list (or Numeric array) of [x, y] points, possibly
                  containing duplicates
    volumes     - list (or Numeric array) of index triples into
                  coordinates; negative indices are left untouched
    boundary    - optional dict mapping (point0, point1) segments to labels

    Returns (coordinates, volumes, boundary) where duplicated points have
    been collapsed to a single entry and all indices remapped accordingly.

    FIX: the original guard was 'type(coordinates)=="array"', which
    compares a type object against a string and is always false, so
    Numeric arrays were never converted to lists.  Duck-type on tolist().
    """
    if hasattr(coordinates, 'tolist'):
        coordinates = coordinates.tolist()
    if hasattr(volumes, 'tolist'):
        volumes = volumes.tolist()

    #point_dict maps a coordinate tuple to a canonical index;
    #same_point maps every original index to its coordinate tuple
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point in point_dict:
            same_point[i] = point
            #to change all point i references to point j
        else:
            point_dict[point] = i
            same_point[i] = point

    #Rebuild the coordinate list with one entry per unique point and
    #renumber point_dict to the new, compact indices
    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    #Remap volume indices (negative entries act as sentinels and are kept)
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    new_boundary = {}
    if not boundary is None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)

            if (point0, point1) in new_boundary:
                new_boundary[(point0,point1)] = new_boundary[(point0,point1)]
            elif (point1, point0) in new_boundary:
                new_boundary[(point1,point0)] = new_boundary[(point1,point0)]
            else:
                new_boundary[(point0,point1)] = label

        boundary = new_boundary

    return coordinates,volumes,boundary
2418
2419
2420
2421#OBSOLETE STUFF
2422#Native checkpoint format.
2423#Information needed to recreate a state is preserved
2424#FIXME: Rethink and maybe use netcdf format
def cpt_variable_writer(filename, t, v0, v1, v2):
    """Store all conserved quantities to file

    One line per volume: the N values of v0, then v1, then v2, each
    written as '%.16e ' (full double precision).
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        parts = []
        for quantity in (v0, v1, v2):
            for j in range(N):
                parts.append('%.16e ' % quantity[i, j])
        fid.write(''.join(parts) + '\n')
    fid.close()
2445
2446
def cpt_variable_reader(filename, t, v0, v1, v2):
    """Read all conserved quantities back from a checkpoint file.

    Inverse of cpt_variable_writer: each line holds the N values of
    v0, then v1, then v2 for one volume; results are written in place
    into the supplied arrays.

    FIX: offsets were hardcoded as values[3+j] and values[6+j], which is
    only correct when N == 3.  Use N and 2*N to match the writer for any N.
    (The docstring also wrongly said 'Store'.)
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Reading from %s' %FN

    fid = open(FN)


    for i in range(M):
        values = fid.readline().split() #Get one line

        for j in range(N):
            v0[i,j] = float(values[j])
            v1[i,j] = float(values[N+j])
            v2[i,j] = float(values[2*N+j])

    fid.close()
2468
2469def cpt_constant_writer(filename, X0, X1, X2, v0, v1, v2):
2470    """Writes x,y,z,z,z coordinates of triangles constituting the bed
2471    elevation.
2472    Not in use pt
2473    """
2474
2475    M, N = v0.shape
2476
2477    print X0
2478    import sys; sys.exit()
2479    FN = create_filename(filename, 'cpt', M)
2480    print 'Writing to %s' %FN
2481
2482    fid = open(FN, 'w')
2483    for i in range(M):
2484        for j in range(2):
2485            fid.write('%.16e ' %X0[i,j])   #x, y
2486        for j in range(N):
2487            fid.write('%.16e ' %v0[i,j])       #z,z,z,
2488
2489        for j in range(2):
2490            fid.write('%.16e ' %X1[i,j])   #x, y
2491        for j in range(N):
2492            fid.write('%.16e ' %v1[i,j])
2493
2494        for j in range(2):
2495            fid.write('%.16e ' %X2[i,j])   #x, y
2496        for j in range(N):
2497            fid.write('%.16e ' %v2[i,j])
2498
2499        fid.write('\n')
2500    fid.close()
2501
2502
2503
2504#Function for storing out to e.g. visualisation
2505#FIXME: Do we want this?
2506#FIXME: Not done yet for this version
def dat_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z coordinates of triangles constituting the bed elevation.

    One line per triangle: for each of the three vertices, the x and y
    coordinates followed by the first value (z) of its quantity array.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        for X, v in ((X0, v0), (X1, v1), (X2, v2)):
            fid.write('%f ' % X[i, 0])   #x
            fid.write('%f ' % X[i, 1])   #y
            fid.write('%f ' % v[i, 0])   #z
        fid.write('\n')
    fid.close()
2532
2533
2534
def dat_variable_writer(filename, t, v0, v1, v2):
    """Store water height to file

    One line per volume: the first component of v0, v1 and v2 at 4
    decimal places.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M, t)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        line = '%.4f %.4f %.4f ' % (v0[i, 0], v1[i, 0], v2[i, 0])
        fid.write(line + '\n')
    fid.close()
2552
2553
2554def read_sww(filename):
2555    """Read sww Net CDF file containing Shallow Water Wave simulation
2556
2557    The integer array volumes is of shape Nx3 where N is the number of
2558    triangles in the mesh.
2559
2560    Each entry in volumes is an index into the x,y arrays (the location).
2561
2562    Quantities stage, elevation, xmomentum and ymomentum are all in arrays of dimensions
2563    number_of_timesteps, number_of_points.
2564
2565    The momentum is not always stored.
2566
2567    """
2568    from Scientific.IO.NetCDF import NetCDFFile
2569    print 'Reading from ', filename
2570    fid = NetCDFFile(filename, 'r')    #Open existing file for read
2571#latitude, longitude
2572    # Get the variables as Numeric arrays
2573    x = fid.variables['x']             #x-coordinates of vertices
2574    y = fid.variables['y']             #y-coordinates of vertices
2575    z = fid.variables['elevation']     #Elevation
2576    time = fid.variables['time']       #Timesteps
2577    stage = fid.variables['stage']     #Water level
2578    #xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
2579    #ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
2580
2581    volumes = fid.variables['volumes'] #Connectivity
2582
2583
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.

    basename_in  - root name of input file (basename_in + '.dem')
    stencil      - 2D weighting array applied to each window of the
                   original grid to produce one decimated value
    cellsize_new - cell size of the decimated grid; assumed to be an
                   integer multiple of the original cellsize
    basename_out - optional root name of output file; defaults to
                   basename_in + '_' + repr(cellsize_new)
    verbose      - diagnostic printing
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata (each stored as a length-1 array attribute)
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out == None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    #(integer division - offsets centre the stencil on each output cell)
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                           'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    #Shift the corner so decimated cells align with stencil centres
    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data: one output row at a time, each value computed from the
    #stencil-weighted window of the original grid
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev =  zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
2710
def sww2ers(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            verbose = False,
            origin = None,
            datum = 'WGS84'):
   
    """Read SWW file and extract a quantity onto a regular grid stored as
    an ERMapper grid (.ers + data file) via ermapper_grids.

    basename_in  - root name of input file (basename_in + '.sww')
    basename_out - root name of output grid; defaults to
                   basename_in + '_<quantity>'
    quantity     - name of quantity to extract; 'depth' is computed as
                   stage - elevation; defaults to 'elevation'
    timestep     - NOTE(review): accepted but currently unused; reduction
                   is always applied instead
    reduction    - function used to reduce the quantity over all
                   timesteps (default max)
    cellsize     - grid cell size in metres
    origin       - optional (zone, xllcorner, yllcorner) overriding the
                   file's georeference
    datum        - datum string written into the ERS header

    (Original docstring below described the older .asc output format.)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    if quantity is given, out values from quantity otherwise default to
    elevation

    if timestep (an index) is given, output quantity at that timestep

    if reduction is given use that to reduce quantity over all timesteps.

    """
    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
    import ermapper_grids

    header = {}
    false_easting = 500000
    false_northing = 10000000
    NODATA_value = 0

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity
 
    swwfile = basename_in + '.sww'
    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option
    ersfile = basename_out
##    prjfile = basename_out + '.prj'


    if verbose: print 'Reading from %s' %swwfile
    #Read sww file
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    ymin = min(y); ymax = max(y)
    xmin = min(x); xmax = max(x)

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]


    #Get quantity and reduce if applicable
    if verbose: print 'Reading quantity %s' %quantity

    if quantity.lower() == 'depth':
        #Depth is derived rather than stored directly
        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
    else:
        q = fid.variables[quantity][:]


    #A 2D (time x points) quantity is collapsed over time with reduction
    if len(q.shape) == 2:
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    #Now q has dimension: number_of_points

    #Create grid and update xll/yll corner and x,y
    if verbose: print 'Creating grid'
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1

    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    #Express vertex coordinates relative to the new grid corner
    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2


    from Numeric import zeros, Float
    grid_points = zeros ( (ncols*nrows, 2), Float )


    for i in xrange(nrows):
##        yg = i*cellsize
        yg = (nrows-i)*cellsize # this will flip the order of the y values
        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

##            print k, xg, yg
            grid_points[k,0] = xg
            grid_points[k,1] = yg

    #Interpolate
    from least_squares import Interpolation
    from util import inside_polygon

    #FIXME: This should be done with precrop = True, otherwise it'll
    #take forever. With expand_search  set to False, some grid points might
    #miss out....

    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
                           precrop = False, expand_search = True,
                           verbose = verbose)

    #Interpolate using quantity values
    if verbose: print 'Interpolating'
    grid_values = interp.interpolate(q).flat
    grid_values = reshape(grid_values,(nrows, ncols))


    # setup header information
    header['datum'] = '"' + datum + '"'
    # FIXME The use of hardwired UTM and zone number needs to be made optional
    # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
    header['projection'] = '"UTM-' + str(zone) + '"'
    header['coordinatetype'] = 'EN'
    if header['coordinatetype'] == 'LL':
        header['longitude'] = str(newxllcorner)
        header['latitude'] = str(newyllcorner)
    elif header['coordinatetype'] == 'EN':
        header['eastings'] = str(newxllcorner)
        header['northings'] = str(newyllcorner)
    header['nullcellvalue'] = str(NODATA_value)
    header['xdimension'] = str(cellsize)
    header['ydimension'] = str(cellsize)
    header['value'] = '"' + quantity + '"'


    #Write
    if verbose: print 'Writing %s' %ersfile
    ermapper_grids.write_ermapper_grid(ersfile,grid_values, header)

##    ascid = open(ascfile, 'w')
##
##    ascid.write('ncols         %d\n' %ncols)
##    ascid.write('nrows         %d\n' %nrows)
##    ascid.write('xllcorner     %d\n' %newxllcorner)
##    ascid.write('yllcorner     %d\n' %newyllcorner)
##    ascid.write('cellsize      %f\n' %cellsize)
##    ascid.write('NODATA_value  %d\n' %NODATA_value)
##
##
##    #Get bounding polygon from mesh
##    P = interp.mesh.get_boundary_polygon()
##    inside_indices = inside_polygon(grid_points, P)
##
##    for i in range(nrows):
##        if verbose and i%((nrows+10)/10)==0:
##            print 'Doing row %d of %d' %(i, nrows)
##
##        for j in range(ncols):
##            index = (nrows-i-1)*ncols+j
##
##            if sometrue(inside_indices == index):
##                ascid.write('%f ' %grid_values[index])
##            else:
##                ascid.write('%d ' %NODATA_value)
##
##        ascid.write('\n')
##
##    #Close
##    ascid.close()
##    fid.close()
Note: See TracBrowser for help on using the repository browser.