source: inundation/ga/storm_surge/pyvolution/data_manager.py @ 1690

Last change on this file since 1690 was 1690, checked in by ole, 19 years ago

Thoughts on gravity and minor cosmetic stuff

File size: 76.1 KB
Line 
"""data_manager.py - input output for AnuGA
2
3
This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18
19.dem: NetCDF representation of regular DEM data
20
21.tsh: ASCII format for storing meshes and associated boundary and region info
22.msh: NetCDF format for storing meshes and associated boundary and region info
23
24.nc: Native ferret NetCDF format
25.geo: Houdinis ascii geometry format (?)
26
27
28A typical dataflow can be described as follows
29
30Manually created files:
31ASC, PRJ:     Digital elevation models (gridded)
32TSH:          Triangular meshes (e.g. created from pmesh)
33NC            Model outputs for use as boundary conditions (e.g from MOST)
34
35
36AUTOMATICALLY CREATED FILES:
37
38ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
39
NC -> SWW: Conversion of MOST boundary files to boundary sww
41
42PTS + TSH -> TSH with elevation: Least squares fit
43
44TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
45
TSH + Boundary SWW -> SWW: Simulation using pyvolution
47
48"""
49
50from Numeric import concatenate
51
52from coordinate_transforms.geo_reference import Geo_reference
53
def make_filename(s):
    """Transform argument string into a suitable filename.

    Strips surrounding whitespace, turns spaces into underscores,
    drops parentheses and collapses double underscores.
    """
    result = s.strip()
    for old, new in ((' ', '_'), ('(', ''), (')', ''), ('__', '_')):
        result = result.replace(old, new)
    return result
65
66
def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       checkdir(path, verbose):

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator
        (an empty path is returned unchanged)
    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1

    # Guard against empty path - indexing path[-1] would raise IndexError
    if path and path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    # NOTE: the mode flags must be OR'ed; the previous 'os.R_OK and os.W_OK'
    # evaluated to os.W_OK alone, so readability was never checked.
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            os.mkdir(path)

            # Change access rights if possible
            if unix:
                os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print('MESSAGE: Directory %s created.' % path)

        except OSError:
            print('WARNING: Directory %s could not be created.' % path)
            # Fall back to a directory that should always be writable
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print('Using directory %s instead' % path)

    return path
119
120
121
def del_dir(path):
    """Recursively delete directory path and all its contents.

    Symbolic links to directories are removed, not followed.
    Files that cannot be removed are reported and skipped.
    Does nothing if path is not a directory.
    """

    import os

    if os.path.isdir(path):
        for entry in os.listdir(path):   # 'entry' avoids shadowing builtin 'file'
            X = os.path.join(path, entry)

            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except OSError:
                    # Best effort: report and keep deleting the rest
                    print("Could not remove file %s" % X)

        os.rmdir(path)
142
143
144
def create_filename(datadir, filename, format, size=None, time=None):
    """Build an output file name of the form
    <datadir>/<filename>[_size<size>][_time<time>].<format>

    The data directory is verified (and created if needed) via check_dir.
    """

    import os
    #from config import data_dir

    parts = [check_dir(datadir), filename]

    if size is not None:
        parts.append('_size%d' % size)

    if time is not None:
        parts.append('_time%.2f' % time)

    parts.append('.' + format)
    return ''.join(parts)
160
161
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format.

    Returns the list of paths matching the naming convention used by
    create_filename: <datadir>/<filename>_size<size>*.<format>
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)

    # check_dir already returns a trailing separator, so no extra os.sep.
    # Match create_filename's convention '_size%d' (the previous pattern
    # '_size=%d' could never match files written by create_filename).
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)
175
176
177
#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats

    Subclasses implement a specific on-disk format; this base class
    derives the output filename and records basic domain bookkeeping.
    """


    def __init__(self, domain, extension, mode = 'w'):
        msg = ("""Mode %s must be either:""" % mode +
               """   'w' (write)""" +
               """   'r' (read)""" +
               """   'a' (append)""")
        assert mode in ('r', 'w', 'a'), msg

        # Derive the output filename from the domain's data directory,
        # name and the format-specific extension
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain

        #FIXME: Should we have a general set_precision function?
207
#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion=False):
        """Create (or open) the .sww file and, in write mode, define its
        NetCDF dimensions and variables.

        domain    -- simulation domain; supplies name, data directory,
                     smoothing flag, order, start time and geo reference
        mode      -- 'w' (write), 'r' (read) or 'a' (append)
        max_size  -- byte threshold at which store_timestep spills output
                     into a new continuation file (default ~2GB)
        recursion -- internal: False for the primary file, otherwise the
                     continuation depth set by store_timestep
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        # A domain-supplied max_size overrides the keyword argument
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            # With smoothing, vertices are shared between triangles;
            # without it every triangle stores its own three vertices
            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        # Duplicate elevation into the legacy 'z' variable as well
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file

        names -- a quantity name or a list/tuple of quantity names;
                 only 'stage', 'xmomentum' and 'ymomentum' are written
                 (see FIXME below)

        If appending this step would push the file past max_size, a new
        Data_format_sww (with the current time in its filename) is
        created and installed as domain.writer, and the step goes there.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat


        #Get NetCDF. Retry for a while in case a reader has the file open.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]    # index 6 is st_size in bytes
        file_size_increase =  file_size/i     # average bytes per stored step
        # Each continuation level is allowed to grow twice as large as the
        # previous one (max_size * 2**recursion)
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domian.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break swollens reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy=False,
                                      precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()
471
472
473
#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt) for checkpoint data

    Stores the mesh (x, y, connectivity) once plus one record per
    timestep for every quantity defined on the domain.
    """


    def __init__(self, domain, mode = 'w'):
        """Create the checkpoint file and define its NetCDF structure.

        domain -- simulation domain providing name, datadir and quantities
        mode   -- 'w' (write), 'r' (read) or 'a' (append)
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        # Use the .cpt extension as documented for this class
        # (previously this erroneously passed 'sww')
        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write the mesh: x,y vertex coordinates and connectivity.

        The elevation values returned by get_vertex_values are not
        stored here: this file defines no standalone elevation variable
        (the previous code assigned to an undefined name 'z', which
        raised NameError).
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)

        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and the named quantity's vertex values to file.

        name -- key into domain.quantities; values are written into the
                NetCDF variable of the same name (one per quantity was
                allocated in __init__)
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF. Retry for a while in case a reader has the file open.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                          %self.filename
                msg += ' - trying again'
                print(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        # Previously this check sat inside the retry loop and the raise was
        # unconditional, so the first iteration always raised. Only raise
        # once all retries are exhausted.
        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        # Write into the variable matching the requested quantity
        # (previously this always wrote into 'stage')
        values = fid.variables[name]
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision = self.precision)

        values[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()
619
620
621
622
623
#Function for storing xya output
#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Generic interface to data formats
    """


    def __init__(self, domain, mode = 'w'):
        """Set up an .xya writer for the given domain.

        domain -- simulation domain providing name and data directory
        mode   -- 'w' (write), 'r' (read) or 'a' (append)
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.

        Each output line holds x, y, bed elevation, stage and friction
        for one vertex.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        # Number of points depends on whether vertices are shared (smooth)
        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        # Header line: <point count> <attribute count> plus format reminder
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        # NOTE: map(None, ...) is the old Python 2 idiom for zipping the
        # arrays element-wise (all arrays have number_of_points entries)
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file

        Not implemented for the xya format; this is a no-op.
        """
        pass
695
696
#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number elements.
       triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """

    import os.path

    # Ensure the output name carries the .obj extension
    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    # One 'v' line per vertex; elements are laid out consecutively
    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

    # Each face references the three vertices of its element
    # (obj indices are 1-based)
    for i in range(M):
        first = i*N + 1
        outfile.write("f %d %d %d\n" % (first, first+1, first+2))

    outfile.close()
735
736
737#########################################################
738#Conversion routines
739########################################################
740
def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj

    basefilename -- stem used by create_filename to locate the .sww file
    size         -- number of volumes (triangles) stored in the file

    Writes one obj file for the bathymetry plus one per timestep for
    the stage values.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    # Vertex j of triangle i appears to live at flat index i + j*M,
    # i.e. all first vertices, then all second, then all third
    # - TODO confirm against get_vertex_values layout
    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        # NOTE(review): only the first five characters of basefilename
        # are used for the per-timestep files - presumably deliberate,
        # but worth confirming
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)
793
794
def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?

    Reads <data_dir>/<basefilename>_geometry.dat for the mesh and each
    <basefilename>*_time=<t>.dat for per-timestep values, writing one
    obj file per input.
    """

    import glob, os
    from config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = [float(token) for token in tokens]

        #Each line holds three (x, y, z) triplets - one per vertex
        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print('Processing %s' % filename)

        # glob already returns paths including data_dir; do not prepend
        # it again (the old code duplicated the prefix, breaking open())
        lines = open(filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise ValueError('Could not extract time from filename %s' % filename)


        for i, line in enumerate(lines):
            tokens = line.split()
            values = [float(token) for token in tokens]

            # One z value per vertex of triangle i
            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
872
873
def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2

    All dimensions and variable definitions are copied; static variables
    are copied in full while 'time' and 'stage' are subsampled.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    # Mirror every variable definition (same type and dimensions)
    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass   # time-dependent variables are subsampled below
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
921
922
#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename

    domain -- simulation domain; domain.format selects the class
              Data_format_<format> defined in this module
    mode   -- passed through to the class constructor
    """

    # Look the class up by name in this module's namespace instead of
    # eval'ing a formatted string (raises KeyError rather than NameError
    # for an unknown format)
    cls = globals()['Data_format_%s' % domain.format]
    return cls(domain, mode)
930
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get NetCDF
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print('Reading xya points from %s' % (root + ext))

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):
        fields = line.split()

        # Warn (but do not abort) on malformed lines
        if len(fields) != 3:
            print('WARNING: Line %d doesn\'t have 3 elements: %s' % (i, line))

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except (ValueError, IndexError):
            #Skip lines that do not contain three numeric values
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)

    infile.close()


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print('Store to NetCDF file %s' % ptsname)
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing (default reference - no offset)
    from coordinate_transforms.geo_reference import Geo_reference
    Geo_reference().write_NetCDF(outfile)


    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()
1033   
def dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation Model from NetCDF format (.dem) and
    convert it to the NetCDF pts format.

    The .dem file mirrors the ArcView ASCII grid layout, e.g.

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    The resulting pts file contains

    points:  (Nx2) Float array of (easting, northing) offsets relative
             to the (possibly cropped) lower left corner
    elevation: N Float array of cell values

    basename_in:  root name of input file; '.dem' is appended
    basename_out: root name of output file; defaults to basename_in.
                  '.pts' is appended
    verbose:      if True, print progress messages
    easting_min, easting_max, northing_min, northing_max:
                  optional bounding box in absolute UTM coordinates used
                  to crop the DEM; each defaults to the full DEM extent
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    #Grid metadata is stored as length-1 NetCDF attribute arrays,
    #hence the [0] indexing
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values (full DEM extent) for any missing bounds
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    #NOTE(review): easting_offset/northing_offset are computed but never
    #used below - the new georeference is written directly instead
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing: output georef is the (possibly cropped) lower left corner
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows


    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
    outfile.createDimension('number_of_points', nrows_in_bounding_box*ncols_in_bounding_box)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    #Store data
    #FIXME: Could perhaps be faster using array operations (Fixed 27/7/05)
    global_index = 0
    for i in range(nrows):
        if verbose: print 'Processing row %d of %d' %(i, nrows)
        lower_index = global_index
        #Per-row staging buffers, flushed to the NetCDF variables below
        tpoints = zeros((ncols_in_bounding_box, 2), Float)
        telev =  zeros(ncols_in_bounding_box, Float)
        local_index = 0

        #Row 0 is the northernmost row in the DEM convention.
        #NOTE(review): y for row i is yllcorner + (nrows-i)*cellsize, which
        #places row 0 one full cellsize above yllcorner + (nrows-1)*cellsize.
        #Looks like a possible off-by-one (nrows-i-1) - confirm against the
        #grid registration assumed by callers.
        y = (nrows-i)*cellsize + yllcorner
        for j in range(ncols):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                #Store coordinates relative to the cropped lower left corner
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        #Only flush rows that fell completely inside the bounding box.
        #NOTE(review): rows only partially inside are silently discarded
        #here even though global_index has already been advanced for them,
        #in which case the assert below would fire - confirm intended.
        if upper_index == lower_index + ncols_in_bounding_box:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    #Sanity check: every expected grid point inside the bounding box was stored
    assert global_index == nrows_in_bounding_box*ncols_in_bounding_box, 'index not equal to number of points'

    infile.close()
    outfile.close()
1171
1172
def sww2asc(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            verbose = False,
            origin = None):
    """Read SWW file and convert to Digital Elevation model format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    if quantity is given, out values from quantity otherwise default to
    elevation. The special name 'depth' yields stage - elevation.

    if timestep (an index) is given, output quantity at that timestep.
    NOTE(review): timestep is currently not used by the implementation;
    time-dependent quantities are always reduced - confirm intended.

    if reduction is given use that to reduce quantity over all timesteps.
    Defaults to the builtin max.

    cellsize is the output grid spacing in metres.

    origin: optional (zone, easting, northing) tuple overriding the
    georeference stored in the sww file.
    """
    from Numeric import array, Float, concatenate, NewAxis, zeros,\
         sometrue


    #FIXME: Should be variable
    datum = 'WGS84'
    false_easting = 500000
    false_northing = 10000000
    NODATA_value = -9999

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    ascfile = basename_out + '.asc'
    prjfile = basename_out + '.prj'


    if verbose: print 'Reading from %s' %swwfile
    #Read sww file
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    ymin = min(y); ymax = max(y)
    xmin = min(x); xmax = max(x)

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        #Caller supplied georeference overrides the file's
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]


    #Get quantity and reduce if applicable
    if verbose: print 'Reading quantity %s' %quantity

    if quantity.lower() == 'depth':
        #Depth is a derived quantity: water surface minus bed
        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
    else:
        q = fid.variables[quantity][:]


    #A 2D shape means the quantity is time dependent; collapse the time
    #axis with the reduction function (default max) per point
    if len(q.shape) == 2:
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    #Now q has dimension: number_of_points


    #Write prj file
    if verbose: print 'Writing %s' %prjfile
    prjid = open(prjfile, 'w')
    prjid.write('Projection    %s\n' %'UTM')
    prjid.write('Zone          %d\n' %zone)
    prjid.write('Datum         %s\n' %datum)
    prjid.write('Zunits        NO\n')
    prjid.write('Units         METERS\n')
    prjid.write('Spheroid      %s\n' %datum)
    prjid.write('Xshift        %d\n' %false_easting)
    prjid.write('Yshift        %d\n' %false_northing)
    prjid.write('Parameters\n')
    prjid.close()

    #Create grid and update xll/yll corner and x,y
    #The output grid origin absorbs the data's minimum so that local grid
    #coordinates start at (0, 0)
    if verbose: print 'Creating grid'
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1

    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2


    from Numeric import zeros, Float
    grid_points = zeros ( (ncols*nrows, 2), Float )


    #Grid points are generated row by row, south to north (row major)
    for i in xrange(nrows):
        yg = i*cellsize
        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

            #print k, xg, yg
            grid_points[k,0] = xg
            grid_points[k,1] = yg


    #Interpolate

    from least_squares import Interpolation
    from util import inside_polygon

    #FIXME: This should be done with precrop = True, otherwise it'll
    #take forever. With expand_search  set to False, some grid points might
    #miss out....

    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
                           precrop = False, expand_search = False,
                           verbose = verbose)

    #Interpolate using quantity values
    if verbose: print 'Interpolating'
    grid_values = interp.interpolate(q).flat

    #Write
    if verbose: print 'Writing %s' %ascfile
    ascid = open(ascfile, 'w')

    ascid.write('ncols         %d\n' %ncols)
    ascid.write('nrows         %d\n' %nrows)
    ascid.write('xllcorner     %d\n' %newxllcorner)
    ascid.write('yllcorner     %d\n' %newyllcorner)
    ascid.write('cellsize      %f\n' %cellsize)
    ascid.write('NODATA_value  %d\n' %NODATA_value)


    #Get bounding polygon from mesh; grid points outside the mesh are
    #written as NODATA_value
    P = interp.mesh.get_boundary_polygon()
    inside_indices = inside_polygon(grid_points, P)

    for i in range(nrows):
        if verbose and i%((nrows+10)/10)==0:
            print 'Doing row %d of %d' %(i, nrows)

        for j in range(ncols):
            #ASC rows run north to south while grid_points were built
            #south to north, hence the (nrows-i-1) flip
            index = (nrows-i-1)*ncols+j

            #NOTE(review): linear membership scan per cell makes this loop
            #O(nrows*ncols*len(inside_indices)) - could be slow on big grids
            if sometrue(inside_indices == index):
                ascid.write('%f ' %grid_values[index])
            else:
                ascid.write('%d ' %NODATA_value)

        ascid.write('\n')

    #Close
    ascid.close()
    fid.close()
1388
1389
def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
                                  verbose=False):
    """Read Digital Elevation model from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert basename_in + '.asc' to NetCDF format (.dem)
    mimicking the ASCII format closely.


    An accompanying file with same basename_in but extension .prj must exist
    and is used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters

    basename_in:  root name of the .asc/.prj input pair
    basename_out: root name of output file; defaults to basename_in.
                  '.dem' is appended
    verbose:      if True, print progress messages

    Raises AssertionError if the .prj keywords are not in the exact
    order shown above or if the .asc row count disagrees with its header.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, array

    #root, ext = os.path.splitext(basename_in)
    root = basename_in

    ###########################################
    # Read Meta data
    # The .prj file is parsed positionally: each line must appear in the
    # fixed order asserted below
    if verbose: print 'Reading METADATA from %s' %root + '.prj'
    metadatafile = open(root + '.prj')
    metalines = metadatafile.readlines()
    metadatafile.close()

    L = metalines[0].strip().split()
    assert L[0].strip().lower() == 'projection'
    projection = L[1].strip()                   #TEXT

    L = metalines[1].strip().split()
    assert L[0].strip().lower() == 'zone'
    zone = int(L[1].strip())

    L = metalines[2].strip().split()
    assert L[0].strip().lower() == 'datum'
    datum = L[1].strip()                        #TEXT

    L = metalines[3].strip().split()
    assert L[0].strip().lower() == 'zunits'     #IGNORE
    zunits = L[1].strip()                       #TEXT

    L = metalines[4].strip().split()
    assert L[0].strip().lower() == 'units'
    units = L[1].strip()                        #TEXT

    L = metalines[5].strip().split()
    assert L[0].strip().lower() == 'spheroid'   #IGNORE
    spheroid = L[1].strip()                     #TEXT

    L = metalines[6].strip().split()
    assert L[0].strip().lower() == 'xshift'
    false_easting = float(L[1].strip())

    L = metalines[7].strip().split()
    assert L[0].strip().lower() == 'yshift'
    false_northing = float(L[1].strip())

    #print false_easting, false_northing, zone, datum


    ###########################################
    #Read DEM data

    datafile = open(basename_in + '.asc')

    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), ' lines'

    #Header is always the first six lines: keyword value pairs
    #NOTE(review): NODATA_value is parsed with int(); a file carrying e.g.
    #'-9999.0' would raise ValueError here - confirm inputs are integral
    ncols = int(lines[0].split()[1].strip())
    nrows = int(lines[1].split()[1].strip())
    xllcorner = float(lines[2].split()[1].strip())
    yllcorner = float(lines[3].split()[1].strip())
    cellsize = float(lines[4].split()[1].strip())
    NODATA_value = int(lines[5].split()[1].strip())

    #One data line per row after the 6 header lines
    assert len(lines) == nrows + 6


    ##########################################


    if basename_out == None:
        netcdfname = root + '.dem'
    else:
        netcdfname = basename_out + '.dem'

    if verbose: print 'Store to NetCDF file %s' %netcdfname
    # NetCDF file definition
    fid = NetCDFFile(netcdfname, 'w')

    #Create new file
    fid.institution = 'Geoscience Australia'
    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
                      'of spatial point data'

    #Copy ASCII header and .prj metadata verbatim as global attributes
    fid.ncols = ncols
    fid.nrows = nrows
    fid.xllcorner = xllcorner
    fid.yllcorner = yllcorner
    fid.cellsize = cellsize
    fid.NODATA_value = NODATA_value

    fid.zone = zone
    fid.false_easting = false_easting
    fid.false_northing = false_northing
    fid.projection = projection
    fid.datum = datum
    fid.units = units


    # dimension definitions
    fid.createDimension('number_of_rows', nrows)
    fid.createDimension('number_of_columns', ncols)

    # variable definitions
    fid.createVariable('elevation', Float, ('number_of_rows',
                                            'number_of_columns'))

    # Get handles to the variables
    elevation = fid.variables['elevation']

    #Store data, one DEM row at a time in file order (north to south)
    for i, line in enumerate(lines[6:]):
        fields = line.split()
        if verbose: print 'Processing row %d of %d' %(i, nrows)

        elevation[i, :] = array([float(x) for x in fields])

    fid.close()
1545
1546
1547
1548def ferret2sww(basename_in, basename_out = None,
1549               verbose = False,
1550               minlat = None, maxlat = None,
1551               minlon = None, maxlon = None,
1552               mint = None, maxt = None, mean_stage = 0,
1553               origin = None, zscale = 1,
1554               fail_on_NaN = True,
1555               NaN_filler = 0,
1556               elevation = None,
1557               inverted_bathymetry = False
1558               ): #FIXME: Bathymetry should be obtained
1559                                  #from MOST somehow.
1560                                  #Alternatively from elsewhere
1561                                  #or, as a last resort,
1562                                  #specified here.
1563                                  #The value of -100 will work
1564                                  #for the Wollongong tsunami
1565                                  #scenario but is very hacky
1566    """Convert 'Ferret' NetCDF format for wave propagation to
1567    sww format native to pyvolution.
1568
1569    Specify only basename_in and read files of the form
1570    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
1571    relative height, x-velocity and y-velocity, respectively.
1572
1573    Also convert latitude and longitude to UTM. All coordinates are
1574    assumed to be given in the GDA94 datum.
1575
1576    min's and max's: If omitted - full extend is used.
1577    To include a value min may equal it, while max must exceed it.
1578    Lat and lon are assuemd to be in decimal degrees
1579
1580    origin is a 3-tuple with geo referenced
1581    UTM coordinates (zone, easting, northing)
1582
1583    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
1584    which means that longitude is the fastest
1585    varying dimension (row major order, so to speak)
1586
1587    ferret2sww uses grid points as vertices in a triangular grid
1588    counting vertices from lower left corner upwards, then right
1589    """
1590
1591    import os
1592    from Scientific.IO.NetCDF import NetCDFFile
1593    from Numeric import Float, Int, Int32, searchsorted, zeros, array
1594    precision = Float
1595
1596
1597    #Get NetCDF data
1598    if verbose: print 'Reading files %s_*.nc' %basename_in
1599    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
1600    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
1601    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
1602    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
1603
1604    if basename_out is None:
1605        swwname = basename_in + '.sww'
1606    else:
1607        swwname = basename_out + '.sww'
1608
1609    times = file_h.variables['TIME']
1610    latitudes = file_h.variables['LAT']
1611    longitudes = file_h.variables['LON']
1612
1613    if mint == None:
1614        jmin = 0
1615    else:
1616        jmin = searchsorted(times, mint)
1617
1618    if maxt == None:
1619        jmax=len(times)
1620    else:
1621        jmax = searchsorted(times, maxt)
1622
1623    if minlat == None:
1624        kmin=0
1625    else:
1626        kmin = searchsorted(latitudes, minlat)
1627
1628    if maxlat == None:
1629        kmax = len(latitudes)
1630    else:
1631        kmax = searchsorted(latitudes, maxlat)
1632
1633    if minlon == None:
1634        lmin=0
1635    else:
1636        lmin = searchsorted(longitudes, minlon)
1637
1638    if maxlon == None:
1639        lmax = len(longitudes)
1640    else:
1641        lmax = searchsorted(longitudes, maxlon)
1642
1643
1644
1645    times = times[jmin:jmax]
1646    latitudes = latitudes[kmin:kmax]
1647    longitudes = longitudes[lmin:lmax]
1648
1649
1650    if verbose: print 'cropping'
1651    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
1652    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
1653    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
1654    elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
1655
1656#    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
1657#        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
1658#    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
1659#        from Numeric import asarray
1660#        elevations=elevations.tolist()
1661#        elevations.reverse()
1662#        elevations=asarray(elevations)
1663#    else:
1664#        from Numeric import asarray
1665#        elevations=elevations.tolist()
1666#        elevations.reverse()
1667#        elevations=asarray(elevations)
1668#        'print hmmm'
1669
1670
1671
1672    #Get missing values
1673    nan_ha = file_h.variables['HA'].missing_value[0]
1674    nan_ua = file_u.variables['UA'].missing_value[0]
1675    nan_va = file_v.variables['VA'].missing_value[0]
1676    if hasattr(file_e.variables['ELEVATION'],'missing_value'):
1677        nan_e  = file_e.variables['ELEVATION'].missing_value[0]
1678    else:
1679        nan_e = None
1680
1681    #Cleanup
1682    from Numeric import sometrue
1683
1684    missing = (amplitudes == nan_ha)
1685    if sometrue (missing):
1686        if fail_on_NaN:
1687            msg = 'NetCDFFile %s contains missing values'\
1688                  %(basename_in+'_ha.nc')
1689            raise msg
1690        else:
1691            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
1692
1693    missing = (uspeed == nan_ua)
1694    if sometrue (missing):
1695        if fail_on_NaN:
1696            msg = 'NetCDFFile %s contains missing values'\
1697                  %(basename_in+'_ua.nc')
1698            raise msg
1699        else:
1700            uspeed = uspeed*(missing==0) + missing*NaN_filler
1701
1702    missing = (vspeed == nan_va)
1703    if sometrue (missing):
1704        if fail_on_NaN:
1705            msg = 'NetCDFFile %s contains missing values'\
1706                  %(basename_in+'_va.nc')
1707            raise msg
1708        else:
1709            vspeed = vspeed*(missing==0) + missing*NaN_filler
1710
1711
1712    missing = (elevations == nan_e)
1713    if sometrue (missing):
1714        if fail_on_NaN:
1715            msg = 'NetCDFFile %s contains missing values'\
1716                  %(basename_in+'_e.nc')
1717            raise msg
1718        else:
1719            elevations = elevations*(missing==0) + missing*NaN_filler
1720
1721    #######
1722
1723
1724
1725    number_of_times = times.shape[0]
1726    number_of_latitudes = latitudes.shape[0]
1727    number_of_longitudes = longitudes.shape[0]
1728
1729    assert amplitudes.shape[0] == number_of_times
1730    assert amplitudes.shape[1] == number_of_latitudes
1731    assert amplitudes.shape[2] == number_of_longitudes
1732
1733    if verbose:
1734        print '------------------------------------------------'
1735        print 'Statistics:'
1736        print '  Extent (lat/lon):'
1737        print '    lat in [%f, %f], len(lat) == %d'\
1738              %(min(latitudes.flat), max(latitudes.flat),
1739                len(latitudes.flat))
1740        print '    lon in [%f, %f], len(lon) == %d'\
1741              %(min(longitudes.flat), max(longitudes.flat),
1742                len(longitudes.flat))
1743        print '    t in [%f, %f], len(t) == %d'\
1744              %(min(times.flat), max(times.flat), len(times.flat))
1745
1746        q = amplitudes.flat
1747        name = 'Amplitudes (ha) [cm]'
1748        print %s in [%f, %f]' %(name, min(q), max(q))
1749
1750        q = uspeed.flat
1751        name = 'Speeds (ua) [cm/s]'
1752        print %s in [%f, %f]' %(name, min(q), max(q))
1753
1754        q = vspeed.flat
1755        name = 'Speeds (va) [cm/s]'
1756        print %s in [%f, %f]' %(name, min(q), max(q))
1757
1758        q = elevations.flat
1759        name = 'Elevations (e) [m]'
1760        print %s in [%f, %f]' %(name, min(q), max(q))
1761
1762
1763    #print number_of_latitudes, number_of_longitudes
1764    number_of_points = number_of_latitudes*number_of_longitudes
1765    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
1766
1767
1768    file_h.close()
1769    file_u.close()
1770    file_v.close()
1771    file_e.close()
1772
1773
1774    # NetCDF file definition
1775    outfile = NetCDFFile(swwname, 'w')
1776
1777    #Create new file
1778    outfile.institution = 'Geoscience Australia'
1779    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
1780                          %(basename_in + '_ha.nc',
1781                            basename_in + '_ua.nc',
1782                            basename_in + '_va.nc',
1783                            basename_in + '_e.nc')
1784
1785
1786    #For sww compatibility
1787    outfile.smoothing = 'Yes'
1788    outfile.order = 1
1789
1790    #Start time in seconds since the epoch (midnight 1/1/1970)
1791    outfile.starttime = starttime = times[0]
1792    times = times - starttime  #Store relative times
1793
1794    # dimension definitions
1795    outfile.createDimension('number_of_volumes', number_of_volumes)
1796
1797    outfile.createDimension('number_of_vertices', 3)
1798    outfile.createDimension('number_of_points', number_of_points)
1799
1800
1801    #outfile.createDimension('number_of_timesteps', len(times))
1802    outfile.createDimension('number_of_timesteps', len(times))
1803
1804    # variable definitions
1805    outfile.createVariable('x', precision, ('number_of_points',))
1806    outfile.createVariable('y', precision, ('number_of_points',))
1807    outfile.createVariable('elevation', precision, ('number_of_points',))
1808
1809    #FIXME: Backwards compatibility
1810    outfile.createVariable('z', precision, ('number_of_points',))
1811    #################################
1812
1813    outfile.createVariable('volumes', Int, ('number_of_volumes',
1814                                            'number_of_vertices'))
1815
1816    outfile.createVariable('time', precision,
1817                           ('number_of_timesteps',))
1818
1819    outfile.createVariable('stage', precision,
1820                           ('number_of_timesteps',
1821                            'number_of_points'))
1822
1823    outfile.createVariable('xmomentum', precision,
1824                           ('number_of_timesteps',
1825                            'number_of_points'))
1826
1827    outfile.createVariable('ymomentum', precision,
1828                           ('number_of_timesteps',
1829                            'number_of_points'))
1830
1831
1832    #Store
1833    from coordinate_transforms.redfearn import redfearn
1834    x = zeros(number_of_points, Float)  #Easting
1835    y = zeros(number_of_points, Float)  #Northing
1836
1837
1838    if verbose: print 'Making triangular grid'
1839    #Check zone boundaries
1840    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
1841
1842    vertices = {}
1843    i = 0
1844    for k, lat in enumerate(latitudes):       #Y direction
1845        for l, lon in enumerate(longitudes):  #X direction
1846
1847            vertices[l,k] = i
1848
1849            zone, easting, northing = redfearn(lat,lon)
1850
1851            msg = 'Zone boundary crossed at longitude =', lon
1852            #assert zone == refzone, msg
1853            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
1854            x[i] = easting
1855            y[i] = northing
1856            i += 1
1857
1858
1859    #Construct 2 triangles per 'rectangular' element
1860    volumes = []
1861    for l in range(number_of_longitudes-1):    #X direction
1862        for k in range(number_of_latitudes-1): #Y direction
1863            v1 = vertices[l,k+1]
1864            v2 = vertices[l,k]
1865            v3 = vertices[l+1,k+1]
1866            v4 = vertices[l+1,k]
1867
1868            volumes.append([v1,v2,v3]) #Upper element
1869            volumes.append([v4,v3,v2]) #Lower element
1870
1871    volumes = array(volumes)
1872
1873    if origin == None:
1874        zone = refzone
1875        xllcorner = min(x)
1876        yllcorner = min(y)
1877    else:
1878        zone = origin[0]
1879        xllcorner = origin[1]
1880        yllcorner = origin[2]
1881
1882
1883    outfile.xllcorner = xllcorner
1884    outfile.yllcorner = yllcorner
1885    outfile.zone = zone
1886
1887
1888    if elevation is not None:
1889        z = elevation
1890    else:
1891        if inverted_bathymetry:
1892            z = -1*elevations
1893        else:
1894            z = elevations
1895        #FIXME: z should be obtained from MOST and passed in here
1896
1897    from Numeric import resize
1898    z = resize(z,outfile.variables['z'][:].shape)
1899    outfile.variables['x'][:] = x - xllcorner
1900    outfile.variables['y'][:] = y - yllcorner
1901    outfile.variables['z'][:] = z
1902    outfile.variables['elevation'][:] = z  #FIXME HACK
1903    outfile.variables['time'][:] = times   #Store time relative
1904    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
1905
1906
1907
1908    #Time stepping
1909    stage = outfile.variables['stage']
1910    xmomentum = outfile.variables['xmomentum']
1911    ymomentum = outfile.variables['ymomentum']
1912
1913    if verbose: print 'Converting quantities'
1914    n = len(times)
1915    for j in range(n):
1916        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
1917        i = 0
1918        for k in range(number_of_latitudes):      #Y direction
1919            for l in range(number_of_longitudes): #X direction
1920                w = zscale*amplitudes[j,k,l]/100 + mean_stage
1921                stage[j,i] = w
1922                h = w - z[i]
1923                xmomentum[j,i] = uspeed[j,k,l]/100*h
1924                ymomentum[j,i] = vspeed[j,k,l]/100*h
1925                i += 1
1926
1927
1928    if verbose:
1929        x = outfile.variables['x'][:]
1930        y = outfile.variables['y'][:]
1931        print '------------------------------------------------'
1932        print 'Statistics of output file:'
1933        print '  Name: %s' %swwname
1934        print '  Reference:'
1935        print '    Lower left corner: [%f, %f]'\
1936              %(xllcorner, yllcorner)
1937        print '    Start time: %f' %starttime
1938        print '  Extent:'
1939        print '    x [m] in [%f, %f], len(x) == %d'\
1940              %(min(x.flat), max(x.flat), len(x.flat))
1941        print '    y [m] in [%f, %f], len(y) == %d'\
1942              %(min(y.flat), max(y.flat), len(y.flat))
1943        print '    t [s] in [%f, %f], len(t) == %d'\
1944              %(min(times), max(times), len(times))
1945        print '  Quantities [SI units]:'
1946        for name in ['stage', 'xmomentum', 'ymomentum']:
1947            q = outfile.variables[name][:].flferret2swwat
1948            print '    %s in [%f, %f]' %(name, min(q), max(q))
1949
1950
1951
1952
1953    outfile.close()
1954
1955
1956
1957
def timefile2swww(filename, quantity_names = None):
    """Template for converting typical text files with time series to
    NetCDF sww file.

    filename: rootname of input/output files. The input is read from
              filename + '.txt' and the result written to filename + '.sww'.
    quantity_names: optional list of names for the value columns.
              Columns without a supplied name are called 'Attribute0',
              'Attribute1', ...

    The file format is assumed to be two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes.

    Raises (string exception, following file convention) if the first
    field cannot be parsed as a date-time, or asserts if times are not
    strictly increasing.
    """

    import time, calendar
    from Numeric import zeros, Float, alltrue
    from config import time_format
    from util import ensure_numeric
    from Scientific.IO.NetCDF import NetCDFFile


    #Read all lines; the first determines start time and number of values
    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    fields = lines[0].split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg

    try:
        starttime = calendar.timegm(time.strptime(fields[0], time_format))
    except ValueError:
        msg = 'First field in file %s must be' %filename
        msg += ' date-time with format %s.\n' %time_format
        msg += 'I got %s instead.' %fields[0]
        raise msg


    #Split values on the first line to determine the number of attributes
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg


    #Read all times and values proper
    N = len(lines)
    d = len(q)

    T = zeros(N, Float)       #Time
    Q = zeros((N, d), Float)  #Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        realtime = calendar.timegm(time.strptime(fields[0], time_format))

        #Store time relative to the start time
        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonuosly ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg


    #Create sww file
    fid = NetCDFFile(filename + '.sww', 'w')

    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'


    #Reference point
    #Start time in seconds since the epoch (midnight 1/1/1970)
    #FIXME: Use Georef
    fid.starttime = starttime


    # dimension definitions
    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', Float, ('number_of_timesteps',))
    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            #quantity_names is None or too short - use a default name
            name = 'Attribute%d'%i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()
2084
2085
def extent_sww(file_name):
    """Determine the extent of an sww file.

    Input;
    file_name - the sww file

    Output;
    A list [min(x), max(x), min(y), max(y), min(stage), max(stage)]
    giving the spatial extent and the stage range over all timesteps.
    """

    from Scientific.IO.NetCDF import NetCDFFile

    #Open NetCDF file for read only
    infile = NetCDFFile(file_name, 'r')

    #Pull out the coordinate and stage arrays
    xs = infile.variables['x'][:]
    ys = infile.variables['y'][:]
    stages = infile.variables['stage'][:]

    infile.close()

    #stage is 2D (time x points) so flatten it before taking min/max
    return [min(xs), max(xs),
            min(ys), max(ys),
            min(stages.flat), max(stages.flat)]
2120
2121
def sww2domain(filename,boundary=None,t=None,\
               fail_if_NaN=True,NaN_filler=0\
               ,verbose = True,very_verbose = False):
    """Reconstruct a shallow_water Domain from an sww file at a given time.

    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    filename:     sww file to read (NetCDF)
    boundary:     optional boundary dictionary {(vol_id, edge): tag}
    t:            model time to extract; conserved quantities are linearly
                  interpolated between the two bracketing stored timesteps
    fail_if_NaN:  if True, raise when a quantity contains the no_data value;
                  otherwise replace such entries with NaN_filler
    verbose/very_verbose: diagnostic printing

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.
    """
    NaN=9.969209968386869e+036
    #initialise NaN - default no_data marker, may be overridden per-variable
    #by a missing_value attribute below.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
    time = fid.variables['time']       #Timesteps
    if t is None:
        t = time[-1]
    time_interp = get_time_interp(time,t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]             #x-coordinates of vertices
    y = fid.variables['y'][:]             #y-coordinates of vertices
    elevation = fid.variables['elevation']     #Elevation
    stage = fid.variables['stage']     #Water level
    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    coordinates=transpose(asarray([x.tolist(),y.tolist()]))

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    #Classify variables: anything with a time dimension is a conserved
    #quantity (and gets interpolated to time t); the rest are static.
    if verbose: print '    getting quantities'
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity]=\
                  interpolated_quantity(fid.variables[quantity][:],time_interp)
        else: other_quantities.append(quantity)

    #Coordinates and connectivity are handled separately, not as quantities
    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
#    From domain.Domain:
#    domain = Domain(coordinates, volumes,\
#                    conserved_quantities = conserved_quantities,\
#                    other_quantities = other_quantities,zone=zone,\
#                    xllcorner=xllcorner, yllcorner=yllcorner)

#   From shallow_water.Domain:
    coordinates=coordinates.tolist()
    volumes=volumes.tolist()
    #FIXME:should this be in mesh?(peter row)
    #A smoothed sww stores unique vertices; otherwise each triangle has
    #its own three vertices, so duplicates must be weeded out first.
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)


    domain = Domain(coordinates, volumes, boundary)

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    #Shift start time so the domain clock begins at 0
    domain.starttime=float(starttime)+float(t)
    domain.time=0.0

    for quantity in other_quantities:
        try:
            #Prefer the variable's own no_data marker if present
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                #Mask out no_data entries and replace them with NaN_filler
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Reshape flat vertex values into one row of 3 per triangle
            X = resize(X,(len(X)/3,3))
        domain.set_quantity(quantity,X)
#
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            X = resize(X,(X.shape[0]/3,3))
        domain.set_quantity(quantity,X)
    fid.close()
    return domain
2264
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a stored quantity in time.

    time_interp is a tuple (index, ratio) as produced by get_time_interp.
    Returns the vector of values linearly blended between timesteps
    index and index+1 (or exactly timestep index when ratio is 0).
    """

    index, ratio = time_interp
    if ratio > 0:
        #Linear interpolation between the two bracketing timesteps
        return (1 - ratio)*saved_quantity[index] +\
               ratio*saved_quantity[index + 1]
    else:
        #Exactly on a stored timestep - no blending needed
        return saved_quantity[index]
2276
def get_time_interp(time, t=None):
    """Find the index and ratio for interpolating to model time t.

    time is the (increasing) sequence of stored timesteps.
    Returns (index, ratio) such that t lies between time[index] and
    time[index+1] with fractional position ratio. If t is None the
    last stored timestep is selected: (-1, 0.).
    Raises (string exception, file convention) when t lies outside
    the stored time interval.

    It is borrowed from previous pyvolution code.
    """
    if t is None:
        #Default to the final stored timestep
        return (-1, 0.)

    tau = t
    msg = 'Time interval derived from file %s [%s:%s]'\
        %('FIXMEfilename', time[0], time[-1])
    msg += ' does not match model time: %s' %tau
    if tau < time[0]: raise msg
    if tau > time[-1]: raise msg

    #Walk forward past tau, then back to the bracketing index
    index = 0
    while tau > time[index]: index += 1
    while tau < time[index]: index -= 1

    if tau == time[index]:
        #Protect against case where tau == time[-1] (last time)
        # - also works in general when tau == time[i]
        ratio = 0
    else:
        #t is now between index and index+1
        ratio = (tau - time[index])/(time[index+1] - time[index])
    return (index, ratio)
2303
2304
def weed(coordinates, volumes, boundary = None):
    """Remove duplicate points from a mesh.

    coordinates: list (or Numeric array) of [x, y] vertex coordinates,
                 possibly containing duplicates
    volumes:     list (or Numeric array) of triangles as triples of
                 indices into coordinates; relabelled IN PLACE
    boundary:    optional dict {(index0, index1): tag}; segment indices
                 are remapped to the weeded point indices

    Returns (coordinates, volumes, boundary) where coordinates holds
    only unique points and all indices refer to the new numbering.
    """
    #Accept Numeric arrays as well as lists.
    #(The original test `type(coordinates)=='array'` compared a type
    #object to a string and could never be true, so arrays were never
    #converted as intended.)
    if hasattr(coordinates, 'tolist'):
        coordinates = coordinates.tolist()
    if hasattr(volumes, 'tolist'):
        volumes = volumes.tolist()

    #Map each coordinate tuple to the index of its first occurrence and
    #record, for every original index, which point it refers to.
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point not in point_dict:
            point_dict[point] = i
        same_point[i] = point  #to change all point i references to point j

    #Rebuild the coordinate list from the unique points and renumber
    #them consecutively.
    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    #Relabel triangle vertices in place (negative indices are sentinels
    #and left untouched).
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    new_boundary = {}
    if not boundary is None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)

            if (point0, point1) in new_boundary:
                #Segment already present - keep existing label
                new_boundary[(point0,point1)] = new_boundary[(point0,point1)]
            elif (point1, point0) in new_boundary:
                #Reversed segment already present - keep existing label
                new_boundary[(point1,point0)] = new_boundary[(point1,point0)]
            else:
                new_boundary[(point0,point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
2360
2361
2362
2363#OBSOLETE STUFF
2364#Native checkpoint format.
2365#Information needed to recreate a state is preserved
2366#FIXME: Rethink and maybe use netcdf format
def cpt_variable_writer(filename, t, v0, v1, v2):
    """Store all conserved quantities to a cpt checkpoint file.

    v0, v1, v2 are (M, N) arrays of vertex values; each of the M rows
    of the output file holds the v0, v1 and v2 rows back to back in
    '%.16e' format.
    """

    M, N = v0.shape

    target = create_filename(filename, 'cpt', M, t)

    fid = open(target, 'w')
    for i in range(M):
        #Write the three value blocks consecutively on one line
        for block in (v0, v1, v2):
            for j in range(N):
                fid.write('%.16e ' % block[i, j])

        fid.write('\n')
    fid.close()
2387
2388
def cpt_variable_reader(filename, t, v0, v1, v2):
    """Read all conserved quantities back from a cpt checkpoint file
    written by cpt_variable_writer.

    v0, v1, v2 are (M, N) arrays which are filled IN PLACE. Each line
    of the file holds the v0, v1 and v2 rows back to back, so row v1
    starts at offset N and row v2 at offset 2*N.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Reading from %s' %FN

    fid = open(FN)


    for i in range(M):
        values = fid.readline().split() #Get one line

        for j in range(N):
            v0[i,j] = float(values[j])
            #The offsets were hardcoded as 3+j and 6+j, which is only
            #correct when N == 3; use N so any row width matches what
            #cpt_variable_writer produced.
            v1[i,j] = float(values[N+j])
            v2[i,j] = float(values[2*N+j])

    fid.close()
2410
def cpt_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z,z,z coordinates of triangles constituting the bed
    elevation.
    Not in use pt

    NOTE(review): this function is deliberately disabled - it prints X0
    and then calls sys.exit(), so nothing below that line ever runs.
    X0, X1, X2 are (M, 2) vertex coordinate arrays and v0, v1, v2 the
    corresponding (M, N) value arrays - presumably; verify before
    re-enabling.
    """

    M, N = v0.shape

    print X0
    import sys; sys.exit()  #Hard stop: everything below is dead code
    FN = create_filename(filename, 'cpt', M)
    print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        for j in range(2):
            fid.write('%.16e ' %X0[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v0[i,j])       #z,z,z,

        for j in range(2):
            fid.write('%.16e ' %X1[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v1[i,j])

        for j in range(2):
            fid.write('%.16e ' %X2[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v2[i,j])

        fid.write('\n')
    fid.close()
2443
2444
2445
2446#Function for storing out to e.g. visualisation
2447#FIXME: Do we want this?
2448#FIXME: Not done yet for this version
def dat_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z coordinates of triangles constituting the bed elevation.

    X0, X1, X2 hold the (M, 2) vertex coordinates and v0, v1, v2 the
    (M, N) vertex values; only the first value column is written as z.
    """

    M, N = v0.shape

    target = create_filename(filename, 'dat', M)

    fid = open(target, 'w')
    for i in range(M):
        #Each of the three vertices contributes x, y and its z value
        for X, v in ((X0, v0), (X1, v1), (X2, v2)):
            for j in range(2):
                fid.write('%f ' % X[i, j])   #x, y
            fid.write('%f ' % v[i, 0])       #z

        fid.write('\n')
    fid.close()
2474
2475
2476
def dat_variable_writer(filename, t, v0, v1, v2):
    """Store water height to file

    Writes the first value column of v0, v1 and v2 (one triangle per
    line) in '%.4f' format to the dat file for time t.
    """

    M, N = v0.shape

    target = create_filename(filename, 'dat', M, t)

    fid = open(target, 'w')
    for i in range(M):
        #One value per vertex of the triangle
        for v in (v0, v1, v2):
            fid.write('%.4f ' % v[i, 0])

        fid.write('\n')
    fid.close()
2494
2495
def read_sww(filename):
    """Read sww Net CDF file containing Shallow Water Wave simulation

    The integer array volumes is of shape Nx3 where N is the number of
    triangles in the mesh.

    Each entry in volumes is an index into the x,y arrays (the location).

    Quantities stage, elevation, xmomentum and ymomentum are all in arrays of dimensions
    number_of_timesteps, number_of_points.

    The momentum is not always stored.

    NOTE(review): as written this function binds the NetCDF variable
    handles to locals but returns nothing and never closes fid - it
    appears to be an unfinished stub. FIXME: decide on a return value
    and close the file.
    """
    from Scientific.IO.NetCDF import NetCDFFile
    print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
#latitude, longitude
    # Get the variables as Numeric arrays
    x = fid.variables['x']             #x-coordinates of vertices
    y = fid.variables['y']             #y-coordinates of vertices
    z = fid.variables['elevation']     #Elevation
    time = fid.variables['time']       #Timesteps
    stage = fid.variables['stage']     #Water level
    #xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    #ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    volumes = fid.variables['volumes'] #Connectivity
2524
2525
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.

    basename_in:  rootname of input file (basename_in + '.dem')
    stencil:      2D weighting array applied to each window of the input
                  grid to produce one output cell
    cellsize_new: cell size of the decimated grid; assumed to be an
                  integer multiple of the input cellsize
    basename_out: optional rootname of output file; defaults to
                  basename_in + '_' + repr(cellsize_new)
    verbose:      diagnostic printing
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out == None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    #The stencil window is centred, hence the half-size offsets
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                           'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    #Shift the origin to the centre of the first stencil window
    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    #View the flat elevation data as the original 2D grid
    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev =  zeros(ncols_new, Float)  #One decimated output row
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        #Write the whole row at once
        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
2652
Note: See TracBrowser for help on using the repository browser.