source: inundation/pyvolution/data_manager.py @ 1740

Last change on this file since 1740 was 1740, checked in by ole, 18 years ago

Recovered data_manager as it was on 18 Aug and test_data_manager as it was on 16 Aug

File size: 76.6 KB
Line 
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18
19.dem: NetCDF representation of regular DEM data
20
21.tsh: ASCII format for storing meshes and associated boundary and region info
22.msh: NetCDF format for storing meshes and associated boundary and region info
23
24.nc: Native ferret NetCDF format
25.geo: Houdinis ascii geometry format (?)
26
27
28A typical dataflow can be described as follows
29
30Manually created files:
31ASC, PRJ:     Digital elevation models (gridded)
32TSH:          Triangular meshes (e.g. created from pmesh)
33NC            Model outputs for use as boundary conditions (e.g from MOST)
34
35
36AUTOMATICALLY CREATED FILES:
37
38ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
39
40NC -> SWW: Conversion of MOST boundary files to boundary sww
41
42PTS + TSH -> TSH with elevation: Least squares fit
43
44TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
45
46TSH + Boundary SWW -> SWW: Simulation using pyvolution
47
48"""
49
50from Numeric import concatenate
51
52from coordinate_transforms.geo_reference import Geo_reference
53
def make_filename(s):
    """Return a filename-friendly version of the string s.

    Leading/trailing whitespace is trimmed, internal spaces become
    underscores, parentheses are dropped and one pass of double
    underscores is collapsed.
    """

    cleaned = s.strip().replace(' ', '_')
    for unwanted in ('(', ')'):
        cleaned = cleaned.replace(unwanted, '')

    return cleaned.replace('__', '_')
65
66
67def check_dir(path, verbose=None):
68    """Check that specified path exists.
69    If path does not exist it will be created if possible
70
71    USAGE:
72       checkdir(path, verbose):
73
74    ARGUMENTS:
75        path -- Directory
76        verbose -- Flag verbose output (default: None)
77
78    RETURN VALUE:
79        Verified path including trailing separator
80
81    """
82
83    import os, sys
84    import os.path
85
86    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
87        unix = 0
88    else:
89        unix = 1
90
91
92    if path[-1] != os.sep:
93        path = path + os.sep  # Add separator for directories
94
95    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
96    if not (os.access(path,os.R_OK and os.W_OK) or path == ''):
97        try:
98            exitcode=os.mkdir(path)
99
100            # Change access rights if possible
101            #
102            if unix:
103                exitcode=os.system('chmod 775 '+path)
104            else:
105                pass  # FIXME: What about acces rights under Windows?
106
107            if verbose: print 'MESSAGE: Directory', path, 'created.'
108
109        except:
110            print 'WARNING: Directory', path, 'could not be created.'
111            if unix:
112                path = '/tmp/'
113            else:
114                path = 'C:'
115
116            print 'Using directory %s instead' %path
117
118    return(path)
119
120
121
122def del_dir(path):
123    """Recursively delete directory path and all its contents
124    """
125
126    import os
127
128    if os.path.isdir(path):
129        for file in os.listdir(path):
130            X = os.path.join(path, file)
131
132
133            if os.path.isdir(X) and not os.path.islink(X):
134                del_dir(X)
135            else:
136                try:
137                    os.remove(X)
138                except:
139                    print "Could not remove file %s" %X
140
141        os.rmdir(path)
142
143
144
def create_filename(datadir, filename, format, size=None, time=None):
    """Build a data file name of the form
    <datadir>/<filename>[_size<size>][_time<time>].<format>
    """

    import os
    #from config import data_dir

    parts = [check_dir(datadir) + filename]

    if size is not None:
        parts.append('_size%d' %size)

    if time is not None:
        parts.append('_time%.2f' %time)

    parts.append('.' + format)

    return ''.join(parts)
160
161
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)  #Already includes trailing os.sep

    #FIX: check_dir returns a path with a trailing separator, so don't
    #append another one.  Also match the '_size%d' naming convention
    #actually used by create_filename (the old '_size=%d' pattern could
    #never match any file written by this module).
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)
175
176
177
178#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats

    Subclasses implement a particular file format.  This base class
    validates the I/O mode and derives the output filename from the
    domain's data directory, the domain's name and the format extension.
    """


    def __init__(self, domain, extension, mode = 'w'):
        msg = '''Mode %s must be either:''' %mode +\
              '''   'w' (write)'''+\
              '''   'r' (read)''' +\
              '''   'a' (append)'''
        assert mode in ['r', 'w', 'a'], msg

        #Filename is <datadir>/<domain name>.<extension>
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain


        #FIXME: Should we have a general set_precision function?
206
207
208#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion=False):
        """Create (mode 'w') or open an sww file for the given domain.

        domain    -- pyvolution domain whose output is to be stored
        mode      -- 'w' (write), 'r' (read) or 'a' (append)
        max_size  -- approximate file size in bytes at which output
                     rolls over to a continuation file (store_timestep)
        recursion -- rollover depth; False for the top-level writer,
                     incremented for each continuation file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        #A domain attribute 'max_size' overrides the keyword argument
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #With smoothing, vertices are shared between triangles;
            #otherwise every triangle stores its three vertices separately
            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        #Duplicate elevation into the legacy 'z' variable
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file

        names -- name (or list of names) of conserved quantities to
                 store, e.g. 'stage', 'xmomentum', 'ymomentum'

        If appending would exceed max_size, output rolls over to a new
        continuation sww file and this domain's writer is redirected.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        minimum_allowed_depth = 0.001
        #minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        #Retry for up to ~10s in case another process has the file open
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase =  file_size/i  #Average size of one timestep
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domian.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break swollens reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #Clip stage to the bed where depth falls below the
                    #minimum allowed (dry cells)
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()
478
479
480
481#Class for handling checkpoints data
482class Data_format_cpt(Data_format):
483    """Interface to native NetCDF format (.cpt)
484    """
485
486
487    def __init__(self, domain, mode = 'w'):
488        from Scientific.IO.NetCDF import NetCDFFile
489        from Numeric import Int, Float, Float
490
491        self.precision = Float #Use full precision
492
493        Data_format.__init__(self, domain, 'sww', mode)
494
495
496        # NetCDF file definition
497        fid = NetCDFFile(self.filename, mode)
498
499        if mode == 'w':
500            #Create new file
501            fid.institution = 'Geoscience Australia'
502            fid.description = 'Checkpoint data'
503            #fid.smooth = domain.smooth
504            fid.order = domain.default_order
505
506            # dimension definitions
507            fid.createDimension('number_of_volumes', self.number_of_volumes)
508            fid.createDimension('number_of_vertices', 3)
509
510            #Store info at all vertices (no smoothing)
511            fid.createDimension('number_of_points', 3*self.number_of_volumes)
512            fid.createDimension('number_of_timesteps', None) #extensible
513
514            # variable definitions
515
516            #Mesh
517            fid.createVariable('x', self.precision, ('number_of_points',))
518            fid.createVariable('y', self.precision, ('number_of_points',))
519
520
521            fid.createVariable('volumes', Int, ('number_of_volumes',
522                                                'number_of_vertices'))
523
524            fid.createVariable('time', self.precision,
525                               ('number_of_timesteps',))
526
527            #Allocate space for all quantities
528            for name in domain.quantities.keys():
529                fid.createVariable(name, self.precision,
530                                   ('number_of_timesteps',
531                                    'number_of_points'))
532
533        #Close
534        fid.close()
535
536
537    def store_checkpoint(self):
538        """
539        Write x,y coordinates of triangles.
540        Write connectivity (
541        constituting
542        the bed elevation.
543        """
544
545        from Scientific.IO.NetCDF import NetCDFFile
546
547        from Numeric import concatenate
548
549        domain = self.domain
550
551        #Get NetCDF
552        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
553
554        # Get the variables
555        x = fid.variables['x']
556        y = fid.variables['y']
557
558        volumes = fid.variables['volumes']
559
560        # Get X, Y and bed elevation Z
561        Q = domain.quantities['elevation']
562        X,Y,Z,V = Q.get_vertex_values(xy=True,
563                                      precision = self.precision)
564
565
566
567        x[:] = X.astype(self.precision)
568        y[:] = Y.astype(self.precision)
569        z[:] = Z.astype(self.precision)
570
571        volumes[:] = V
572
573        #Close
574        fid.close()
575
576
577    def store_timestep(self, name):
578        """Store time and named quantity to file
579        """
580        from Scientific.IO.NetCDF import NetCDFFile
581        from time import sleep
582
583        #Get NetCDF
584        retries = 0
585        file_open = False
586        while not file_open and retries < 10:
587            try:
588                fid = NetCDFFile(self.filename, 'a') #Open existing file
589            except IOError:
590                #This could happen if someone was reading the file.
591                #In that case, wait a while and try again
592                msg = 'Warning (store_timestep): File %s could not be opened'\
593                          %self.filename
594                msg += ' - trying again'
595                print msg
596                retries += 1
597                sleep(1)
598            else:
599                file_open = True
600
601            if not file_open:
602                msg = 'File %s could not be opened for append' %self.filename
603            raise msg
604
605
606        domain = self.domain
607
608        # Get the variables
609        time = fid.variables['time']
610        stage = fid.variables['stage']
611        i = len(time)
612
613        #Store stage
614        time[i] = self.domain.time
615
616        # Get quantity
617        Q = domain.quantities[name]
618        A,V = Q.get_vertex_values(xy=False,
619                                  precision = self.precision)
620
621        stage[i,:] = A.astype(self.precision)
622
623        #Flush and close
624        fid.sync()
625        fid.close()
626
627
628
629
630
631#Function for storing xya output
632#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface to the ASCII xya format: stores x,y coordinates and
    vertex attributes (bed elevation, stage, friction) of a domain.
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation, together with stage and friction.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        #Number of points depends on whether vertex values are smoothed
        #(shared between triangles) or stored per triangle vertex
        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        #Header: <number of points> <number of attributes> + comment
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        #map(None, ...) zips the arrays (old Python 2 idiom)
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file

        Not implemented for the xya format.
        """
        pass
702
703
704#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)

       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """

    import os.path

    #Append .obj extension unless already present
    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    #FIX: obj is a text format - open in text mode rather than 'wb'
    outfile = open(FN, 'w')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    #Vertex records, one per element vertex (vertices are not shared)
    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    #Face records; obj vertex indices are 1-based
    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()
742
743
744#########################################################
745#Conversion routines
746########################################################
747
def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj

    basefilename -- base name of the .sww file (without size suffix
                    or extension)
    size         -- number of volumes (triangles) stored in the file

    Writes one obj file for the bathymetry plus one per stored
    timestep.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    #Un-flatten the point arrays: point i+j*M is taken to be vertex j
    #of triangle i (assumes unsmoothed storage layout -- TODO confirm)
    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #NOTE(review): basefilename is truncated to 5 characters here;
        #presumably to keep names short for the viewer -- verify.
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)
800
801
def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?

    Reads <data_dir>/<basefilename>_geometry.dat for the mesh (nine
    values per line: x,y,z for each of three vertices) and one .dat
    file per timestep, writing an obj file for each.
    """

    import glob, os
    from config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    ##i = 0
    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        #Nine values per line: x,y,z for each of the three vertices
        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]

        ##i += 1


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info
    #and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        #NOTE(review): glob above already returns paths that include
        #data_dir, so prepending data_dir again here looks wrong --
        #verify before relying on this (function is flagged obsolete).
        lines = open(data_dir+os.sep+filename,'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise 'Hmmmm'



        ##i = 0
        #One value per vertex: the quantity replaces z
        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]

            ##i += 1

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
879
880
def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2

    All variables other than 'time' and 'stage' are copied unchanged;
    'time' and 'stage' are subsampled according to first/step/last.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    #Recreate every variable with the input's type and dimensions
    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
928
929
930#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename

    The writer class is looked up by name: e.g. domain.format == 'sww'
    selects Data_format_sww.
    """

    #Look the class up in the module namespace instead of using eval,
    #which would execute arbitrary expressions held in domain.format.
    cls = globals()['Data_format_%s' %domain.format]
    return cls(domain, mode)
937
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    basename_in    -- input file name (extension defaults to .xya)
    basename_out   -- output base name (defaults to basename_in's root)
    verbose        -- print progress messages
    stride         -- keep only every stride'th input line
    attribute_name -- name under which the third column is stored
    z_func         -- optional callable applied to the third column

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get NetCDF
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        #Warn about malformed lines but keep going; the float
        #conversion below decides whether the line is actually used
        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    #A default (trivial) georeference is written; input coordinates
    #are stored as given
    from coordinate_transforms.geo_reference import Geo_reference
    Geo_reference().write_NetCDF(outfile)


    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    infile.close()
    outfile.close()
1044   
def dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    basename_in:  root name of input NetCDF .dem file (extension added here)
    basename_out: root name of output .pts file; defaults to basename_in
    easting_min, easting_max, northing_min, northing_max:
        optional absolute UTM bounds used to crop the DEM;
        each defaults to the corresponding full extent of the grid.

    Output points are stored relative to (easting_min, northing_min),
    which becomes the georeference (xllcorner, yllcorner) of the .pts file.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    #Grid metadata is stored as length-1 attribute arrays; [0] extracts scalars
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    #NOTE(review): these offsets are computed but never used below -- confirm
    #whether they were meant to feed into the georeferencing attributes
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing: output origin is the (possibly cropped) lower left corner
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows


    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
    outfile.createDimension('number_of_points', nrows_in_bounding_box*ncols_in_bounding_box)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    #Read the full DEM into memory as an (nrows, ncols) array
    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    #FIXME: Could perhaps be faster using array operations (Fixed 27/7/05)
    global_index = 0   #Running count of accepted points (also next write index)
    for i in range(nrows):
        if verbose: print 'Processing row %d of %d' %(i, nrows)
        lower_index = global_index
        #Row-sized staging buffers, written to file in one slice per row
        tpoints = zeros((ncols_in_bounding_box, 2), Float)
        telev =  zeros(ncols_in_bounding_box, Float)
        local_index = 0

        #Row 0 is the northernmost row of the grid
        #NOTE(review): (nrows-i) maps row 0 to yllcorner + nrows*cellsize;
        #(nrows-i-1) may have been intended -- confirm against the cell
        #registration convention used by the .asc producers
        y = (nrows-i)*cellsize + yllcorner
        for j in range(ncols):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                #Store coordinates relative to the new origin
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        #Only rows that yielded a full ncols_in_bounding_box worth of points
        #are written out; a row partially inside the bounds is counted in
        #global_index but never stored, in which case the assert below fails
        if upper_index == lower_index + ncols_in_bounding_box:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nrows_in_bounding_box*ncols_in_bounding_box, 'index not equal to number of points'

    infile.close()
    outfile.close()
1185
1186def sww2asc(basename_in, basename_out = None,
1187            quantity = None,
1188            timestep = None,
1189            reduction = None,
1190            cellsize = 10,
1191            verbose = False,
1192            origin = None):
1193    """Read SWW file and convert to Digitial Elevation model format (.asc)
1194
1195    Example:
1196
1197    ncols         3121
1198    nrows         1800
1199    xllcorner     722000
1200    yllcorner     5893000
1201    cellsize      25
1202    NODATA_value  -9999
1203    138.3698 137.4194 136.5062 135.5558 ..........
1204
1205    Also write accompanying file with same basename_in but extension .prj
1206    used to fix the UTM zone, datum, false northings and eastings.
1207
1208    The prj format is assumed to be as
1209
1210    Projection    UTM
1211    Zone          56
1212    Datum         WGS84
1213    Zunits        NO
1214    Units         METERS
1215    Spheroid      WGS84
1216    Xshift        0.0000000000
1217    Yshift        10000000.0000000000
1218    Parameters
1219
1220
1221    if quantity is given, out values from quantity otherwise default to
1222    elevation
1223
1224    if timestep (an index) is given, output quantity at that timestep
1225
1226    if reduction is given use that to reduce quantity over all timesteps.
1227
1228    """
1229    from Numeric import array, Float, concatenate, NewAxis, zeros,\
1230         sometrue
1231
1232
1233    #FIXME: Should be variable
1234    datum = 'WGS84'
1235    false_easting = 500000
1236    false_northing = 10000000
1237    NODATA_value = -9999
1238
1239    if quantity is None:
1240        quantity = 'elevation'
1241
1242    if reduction is None:
1243        reduction = max
1244
1245    if basename_out is None:
1246        basename_out = basename_in + '_%s' %quantity
1247
1248    swwfile = basename_in + '.sww'
1249    ascfile = basename_out + '.asc'
1250    prjfile = basename_out + '.prj'
1251
1252
1253    if verbose: print 'Reading from %s' %swwfile
1254    #Read sww file
1255    from Scientific.IO.NetCDF import NetCDFFile
1256    fid = NetCDFFile(swwfile)
1257
1258    #Get extent and reference
1259    x = fid.variables['x'][:]
1260    y = fid.variables['y'][:]
1261    volumes = fid.variables['volumes'][:]
1262
1263    ymin = min(y); ymax = max(y)
1264    xmin = min(x); xmax = max(x)
1265
1266    number_of_timesteps = fid.dimensions['number_of_timesteps']
1267    number_of_points = fid.dimensions['number_of_points']
1268    if origin is None:
1269
1270        #Get geo_reference
1271        #sww files don't have to have a geo_ref
1272        try:
1273            geo_reference = Geo_reference(NetCDFObject=fid)
1274        except AttributeError, e:
1275            geo_reference = Geo_reference() #Default georef object
1276
1277        xllcorner = geo_reference.get_xllcorner()
1278        yllcorner = geo_reference.get_yllcorner()
1279        zone = geo_reference.get_zone()
1280    else:
1281        zone = origin[0]
1282        xllcorner = origin[1]
1283        yllcorner = origin[2]
1284
1285
1286    #Get quantity and reduce if applicable
1287    if verbose: print 'Reading quantity %s' %quantity
1288
1289    if quantity.lower() == 'depth':
1290        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
1291    else:
1292        q = fid.variables[quantity][:]
1293
1294
1295    if len(q.shape) == 2:
1296        if verbose: print 'Reducing quantity %s' %quantity
1297        q_reduced = zeros( number_of_points, Float )
1298
1299        for k in range(number_of_points):
1300            q_reduced[k] = reduction( q[:,k] )
1301
1302        q = q_reduced
1303
1304    #Now q has dimension: number_of_points
1305
1306
1307    #Write prj file
1308    if verbose: print 'Writing %s' %prjfile
1309    prjid = open(prjfile, 'w')
1310    prjid.write('Projection    %s\n' %'UTM')
1311    prjid.write('Zone          %d\n' %zone)
1312    prjid.write('Datum         %s\n' %datum)
1313    prjid.write('Zunits        NO\n')
1314    prjid.write('Units         METERS\n')
1315    prjid.write('Spheroid      %s\n' %datum)
1316    prjid.write('Xshift        %d\n' %false_easting)
1317    prjid.write('Yshift        %d\n' %false_northing)
1318    prjid.write('Parameters\n')
1319    prjid.close()
1320
1321    #Create grid and update xll/yll corner and x,y
1322    if verbose: print 'Creating grid'
1323    ncols = int((xmax-xmin)/cellsize)+1
1324    nrows = int((ymax-ymin)/cellsize)+1
1325
1326    newxllcorner = xmin+xllcorner
1327    newyllcorner = ymin+yllcorner
1328
1329    x = x+xllcorner-newxllcorner
1330    y = y+yllcorner-newyllcorner
1331
1332    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1333    assert len(vertex_points.shape) == 2
1334
1335
1336    from Numeric import zeros, Float
1337    grid_points = zeros ( (ncols*nrows, 2), Float )
1338
1339
1340    for i in xrange(nrows):
1341        yg = i*cellsize
1342        for j in xrange(ncols):
1343            xg = j*cellsize
1344            k = i*ncols + j
1345
1346            #print k, xg, yg
1347            grid_points[k,0] = xg
1348            grid_points[k,1] = yg
1349
1350
1351    #Interpolate
1352
1353    from least_squares import Interpolation
1354    from util import inside_polygon
1355
1356    #FIXME: This should be done with precrop = True, otherwise it'll
1357    #take forever. With expand_search  set to False, some grid points might
1358    #miss out....
1359
1360    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
1361                           precrop = False, expand_search = False,
1362                           verbose = verbose)
1363
1364    #Interpolate using quantity values
1365    if verbose: print 'Interpolating'
1366    grid_values = interp.interpolate(q).flat
1367
1368    #Write
1369    if verbose: print 'Writing %s' %ascfile
1370    ascid = open(ascfile, 'w')
1371
1372    ascid.write('ncols         %d\n' %ncols)
1373    ascid.write('nrows         %d\n' %nrows)
1374    ascid.write('xllcorner     %d\n' %newxllcorner)
1375    ascid.write('yllcorner     %d\n' %newyllcorner)
1376    ascid.write('cellsize      %f\n' %cellsize)
1377    ascid.write('NODATA_value  %d\n' %NODATA_value)
1378
1379
1380    #Get bounding polygon from mesh
1381    P = interp.mesh.get_boundary_polygon()
1382    inside_indices = inside_polygon(grid_points, P)
1383
1384    for i in range(nrows):
1385        if verbose and i%((nrows+10)/10)==0:
1386            print 'Doing row %d of %d' %(i, nrows)
1387
1388        for j in range(ncols):
1389            index = (nrows-i-1)*ncols+j
1390
1391            if sometrue(inside_indices == index):
1392                ascid.write('%f ' %grid_values[index])
1393            else:
1394                ascid.write('%d ' %NODATA_value)
1395
1396        ascid.write('\n')
1397
1398    #Close
1399    ascid.close()
1400    fid.close()
1401
1402
1403def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1404                                  verbose=False):
1405    """Read Digitial Elevation model from the following ASCII format (.asc)
1406
1407    Example:
1408
1409    ncols         3121
1410    nrows         1800
1411    xllcorner     722000
1412    yllcorner     5893000
1413    cellsize      25
1414    NODATA_value  -9999
1415    138.3698 137.4194 136.5062 135.5558 ..........
1416
1417    Convert basename_in + '.asc' to NetCDF format (.dem)
1418    mimicking the ASCII format closely.
1419
1420
1421    An accompanying file with same basename_in but extension .prj must exist
1422    and is used to fix the UTM zone, datum, false northings and eastings.
1423
1424    The prj format is assumed to be as
1425
1426    Projection    UTM
1427    Zone          56
1428    Datum         WGS84
1429    Zunits        NO
1430    Units         METERS
1431    Spheroid      WGS84
1432    Xshift        0.0000000000
1433    Yshift        10000000.0000000000
1434    Parameters
1435    """
1436
1437    import os
1438    from Scientific.IO.NetCDF import NetCDFFile
1439    from Numeric import Float, array
1440
1441    #root, ext = os.path.splitext(basename_in)
1442    root = basename_in
1443
1444    ###########################################
1445    # Read Meta data
1446    if verbose: print 'Reading METADATA from %s' %root + '.prj'
1447    metadatafile = open(root + '.prj')
1448    metalines = metadatafile.readlines()
1449    metadatafile.close()
1450
1451    L = metalines[0].strip().split()
1452    assert L[0].strip().lower() == 'projection'
1453    projection = L[1].strip()                   #TEXT
1454
1455    L = metalines[1].strip().split()
1456    assert L[0].strip().lower() == 'zone'
1457    zone = int(L[1].strip())
1458
1459    L = metalines[2].strip().split()
1460    assert L[0].strip().lower() == 'datum'
1461    datum = L[1].strip()                        #TEXT
1462
1463    L = metalines[3].strip().split()
1464    assert L[0].strip().lower() == 'zunits'     #IGNORE
1465    zunits = L[1].strip()                       #TEXT
1466
1467    L = metalines[4].strip().split()
1468    assert L[0].strip().lower() == 'units'
1469    units = L[1].strip()                        #TEXT
1470
1471    L = metalines[5].strip().split()
1472    assert L[0].strip().lower() == 'spheroid'   #IGNORE
1473    spheroid = L[1].strip()                     #TEXT
1474
1475    L = metalines[6].strip().split()
1476    assert L[0].strip().lower() == 'xshift'
1477    false_easting = float(L[1].strip())
1478
1479    L = metalines[7].strip().split()
1480    assert L[0].strip().lower() == 'yshift'
1481    false_northing = float(L[1].strip())
1482
1483    #print false_easting, false_northing, zone, datum
1484
1485
1486    ###########################################
1487    #Read DEM data
1488
1489    datafile = open(basename_in + '.asc')
1490
1491    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
1492    lines = datafile.readlines()
1493    datafile.close()
1494
1495    if verbose: print 'Got', len(lines), ' lines'
1496
1497    ncols = int(lines[0].split()[1].strip())
1498    nrows = int(lines[1].split()[1].strip())
1499    xllcorner = float(lines[2].split()[1].strip())
1500    yllcorner = float(lines[3].split()[1].strip())
1501    cellsize = float(lines[4].split()[1].strip())
1502    NODATA_value = int(lines[5].split()[1].strip())
1503
1504    assert len(lines) == nrows + 6
1505
1506
1507    ##########################################
1508
1509
1510    if basename_out == None:
1511        netcdfname = root + '.dem'
1512    else:
1513        netcdfname = basename_out + '.dem'
1514
1515    if verbose: print 'Store to NetCDF file %s' %netcdfname
1516    # NetCDF file definition
1517    fid = NetCDFFile(netcdfname, 'w')
1518
1519    #Create new file
1520    fid.institution = 'Geoscience Australia'
1521    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
1522                      'of spatial point data'
1523
1524    fid.ncols = ncols
1525    fid.nrows = nrows
1526    fid.xllcorner = xllcorner
1527    fid.yllcorner = yllcorner
1528    fid.cellsize = cellsize
1529    fid.NODATA_value = NODATA_value
1530
1531    fid.zone = zone
1532    fid.false_easting = false_easting
1533    fid.false_northing = false_northing
1534    fid.projection = projection
1535    fid.datum = datum
1536    fid.units = units
1537
1538
1539    # dimension definitions
1540    fid.createDimension('number_of_rows', nrows)
1541    fid.createDimension('number_of_columns', ncols)
1542
1543    # variable definitions
1544    fid.createVariable('elevation', Float, ('number_of_rows',
1545                                            'number_of_columns'))
1546
1547    # Get handles to the variables
1548    elevation = fid.variables['elevation']
1549
1550    #Store data
1551    for i, line in enumerate(lines[6:]):
1552        fields = line.split()
1553        if verbose: print 'Processing row %d of %d' %(i, nrows)
1554
1555        elevation[i, :] = array([float(x) for x in fields])
1556
1557    fid.close()
1558
1559
1560
1561def ferret2sww(basename_in, basename_out = None,
1562               verbose = False,
1563               minlat = None, maxlat = None,
1564               minlon = None, maxlon = None,
1565               mint = None, maxt = None, mean_stage = 0,
1566               origin = None, zscale = 1,
1567               fail_on_NaN = True,
1568               NaN_filler = 0,
1569               elevation = None,
1570               inverted_bathymetry = False
1571               ): #FIXME: Bathymetry should be obtained
1572                                  #from MOST somehow.
1573                                  #Alternatively from elsewhere
1574                                  #or, as a last resort,
1575                                  #specified here.
1576                                  #The value of -100 will work
1577                                  #for the Wollongong tsunami
1578                                  #scenario but is very hacky
1579    """Convert 'Ferret' NetCDF format for wave propagation to
1580    sww format native to pyvolution.
1581
1582    Specify only basename_in and read files of the form
1583    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
1584    relative height, x-velocity and y-velocity, respectively.
1585
1586    Also convert latitude and longitude to UTM. All coordinates are
1587    assumed to be given in the GDA94 datum.
1588
1589    min's and max's: If omitted - full extend is used.
1590    To include a value min may equal it, while max must exceed it.
1591    Lat and lon are assuemd to be in decimal degrees
1592
1593    origin is a 3-tuple with geo referenced
1594    UTM coordinates (zone, easting, northing)
1595
1596    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
1597    which means that longitude is the fastest
1598    varying dimension (row major order, so to speak)
1599
1600    ferret2sww uses grid points as vertices in a triangular grid
1601    counting vertices from lower left corner upwards, then right
1602    """
1603
1604    import os
1605    from Scientific.IO.NetCDF import NetCDFFile
1606    from Numeric import Float, Int, Int32, searchsorted, zeros, array
1607    precision = Float
1608
1609
1610    #Get NetCDF data
1611    if verbose: print 'Reading files %s_*.nc' %basename_in
1612    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
1613    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
1614    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
1615    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
1616
1617    if basename_out is None:
1618        swwname = basename_in + '.sww'
1619    else:
1620        swwname = basename_out + '.sww'
1621
1622    times = file_h.variables['TIME']
1623    latitudes = file_h.variables['LAT']
1624    longitudes = file_h.variables['LON']
1625
1626    if mint == None:
1627        jmin = 0
1628    else:
1629        jmin = searchsorted(times, mint)
1630
1631    if maxt == None:
1632        jmax=len(times)
1633    else:
1634        jmax = searchsorted(times, maxt)
1635
1636    if minlat == None:
1637        kmin=0
1638    else:
1639        kmin = searchsorted(latitudes, minlat)
1640
1641    if maxlat == None:
1642        kmax = len(latitudes)
1643    else:
1644        kmax = searchsorted(latitudes, maxlat)
1645
1646    if minlon == None:
1647        lmin=0
1648    else:
1649        lmin = searchsorted(longitudes, minlon)
1650
1651    if maxlon == None:
1652        lmax = len(longitudes)
1653    else:
1654        lmax = searchsorted(longitudes, maxlon)
1655
1656
1657
1658    times = times[jmin:jmax]
1659    latitudes = latitudes[kmin:kmax]
1660    longitudes = longitudes[lmin:lmax]
1661
1662
1663    if verbose: print 'cropping'
1664    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
1665    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
1666    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
1667    elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
1668
1669#    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
1670#        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
1671#    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
1672#        from Numeric import asarray
1673#        elevations=elevations.tolist()
1674#        elevations.reverse()
1675#        elevations=asarray(elevations)
1676#    else:
1677#        from Numeric import asarray
1678#        elevations=elevations.tolist()
1679#        elevations.reverse()
1680#        elevations=asarray(elevations)
1681#        'print hmmm'
1682
1683
1684
1685    #Get missing values
1686    nan_ha = file_h.variables['HA'].missing_value[0]
1687    nan_ua = file_u.variables['UA'].missing_value[0]
1688    nan_va = file_v.variables['VA'].missing_value[0]
1689    if hasattr(file_e.variables['ELEVATION'],'missing_value'):
1690        nan_e  = file_e.variables['ELEVATION'].missing_value[0]
1691    else:
1692        nan_e = None
1693
1694    #Cleanup
1695    from Numeric import sometrue
1696
1697    missing = (amplitudes == nan_ha)
1698    if sometrue (missing):
1699        if fail_on_NaN:
1700            msg = 'NetCDFFile %s contains missing values'\
1701                  %(basename_in+'_ha.nc')
1702            raise msg
1703        else:
1704            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
1705
1706    missing = (uspeed == nan_ua)
1707    if sometrue (missing):
1708        if fail_on_NaN:
1709            msg = 'NetCDFFile %s contains missing values'\
1710                  %(basename_in+'_ua.nc')
1711            raise msg
1712        else:
1713            uspeed = uspeed*(missing==0) + missing*NaN_filler
1714
1715    missing = (vspeed == nan_va)
1716    if sometrue (missing):
1717        if fail_on_NaN:
1718            msg = 'NetCDFFile %s contains missing values'\
1719                  %(basename_in+'_va.nc')
1720            raise msg
1721        else:
1722            vspeed = vspeed*(missing==0) + missing*NaN_filler
1723
1724
1725    missing = (elevations == nan_e)
1726    if sometrue (missing):
1727        if fail_on_NaN:
1728            msg = 'NetCDFFile %s contains missing values'\
1729                  %(basename_in+'_e.nc')
1730            raise msg
1731        else:
1732            elevations = elevations*(missing==0) + missing*NaN_filler
1733
1734    #######
1735
1736
1737
1738    number_of_times = times.shape[0]
1739    number_of_latitudes = latitudes.shape[0]
1740    number_of_longitudes = longitudes.shape[0]
1741
1742    assert amplitudes.shape[0] == number_of_times
1743    assert amplitudes.shape[1] == number_of_latitudes
1744    assert amplitudes.shape[2] == number_of_longitudes
1745
1746    if verbose:
1747        print '------------------------------------------------'
1748        print 'Statistics:'
1749        print '  Extent (lat/lon):'
1750        print '    lat in [%f, %f], len(lat) == %d'\
1751              %(min(latitudes.flat), max(latitudes.flat),
1752                len(latitudes.flat))
1753        print '    lon in [%f, %f], len(lon) == %d'\
1754              %(min(longitudes.flat), max(longitudes.flat),
1755                len(longitudes.flat))
1756        print '    t in [%f, %f], len(t) == %d'\
1757              %(min(times.flat), max(times.flat), len(times.flat))
1758
1759        q = amplitudes.flat
1760        name = 'Amplitudes (ha) [cm]'
1761        print %s in [%f, %f]' %(name, min(q), max(q))
1762
1763        q = uspeed.flat
1764        name = 'Speeds (ua) [cm/s]'
1765        print %s in [%f, %f]' %(name, min(q), max(q))
1766
1767        q = vspeed.flat
1768        name = 'Speeds (va) [cm/s]'
1769        print %s in [%f, %f]' %(name, min(q), max(q))
1770
1771        q = elevations.flat
1772        name = 'Elevations (e) [m]'
1773        print %s in [%f, %f]' %(name, min(q), max(q))
1774
1775
1776    #print number_of_latitudes, number_of_longitudes
1777    number_of_points = number_of_latitudes*number_of_longitudes
1778    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
1779
1780
1781    file_h.close()
1782    file_u.close()
1783    file_v.close()
1784    file_e.close()
1785
1786
1787    # NetCDF file definition
1788    outfile = NetCDFFile(swwname, 'w')
1789
1790    #Create new file
1791    outfile.institution = 'Geoscience Australia'
1792    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
1793                          %(basename_in + '_ha.nc',
1794                            basename_in + '_ua.nc',
1795                            basename_in + '_va.nc',
1796                            basename_in + '_e.nc')
1797
1798
1799    #For sww compatibility
1800    outfile.smoothing = 'Yes'
1801    outfile.order = 1
1802
1803    #Start time in seconds since the epoch (midnight 1/1/1970)
1804    outfile.starttime = starttime = times[0]
1805    times = times - starttime  #Store relative times
1806
1807    # dimension definitions
1808    outfile.createDimension('number_of_volumes', number_of_volumes)
1809
1810    outfile.createDimension('number_of_vertices', 3)
1811    outfile.createDimension('number_of_points', number_of_points)
1812
1813
1814    #outfile.createDimension('number_of_timesteps', len(times))
1815    outfile.createDimension('number_of_timesteps', len(times))
1816
1817    # variable definitions
1818    outfile.createVariable('x', precision, ('number_of_points',))
1819    outfile.createVariable('y', precision, ('number_of_points',))
1820    outfile.createVariable('elevation', precision, ('number_of_points',))
1821
1822    #FIXME: Backwards compatibility
1823    outfile.createVariable('z', precision, ('number_of_points',))
1824    #################################
1825
1826    outfile.createVariable('volumes', Int, ('number_of_volumes',
1827                                            'number_of_vertices'))
1828
1829    outfile.createVariable('time', precision,
1830                           ('number_of_timesteps',))
1831
1832    outfile.createVariable('stage', precision,
1833                           ('number_of_timesteps',
1834                            'number_of_points'))
1835
1836    outfile.createVariable('xmomentum', precision,
1837                           ('number_of_timesteps',
1838                            'number_of_points'))
1839
1840    outfile.createVariable('ymomentum', precision,
1841                           ('number_of_timesteps',
1842                            'number_of_points'))
1843
1844
1845    #Store
1846    from coordinate_transforms.redfearn import redfearn
1847    x = zeros(number_of_points, Float)  #Easting
1848    y = zeros(number_of_points, Float)  #Northing
1849
1850
1851    if verbose: print 'Making triangular grid'
1852    #Check zone boundaries
1853    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
1854
1855    vertices = {}
1856    i = 0
1857    for k, lat in enumerate(latitudes):       #Y direction
1858        for l, lon in enumerate(longitudes):  #X direction
1859
1860            vertices[l,k] = i
1861
1862            zone, easting, northing = redfearn(lat,lon)
1863
1864            msg = 'Zone boundary crossed at longitude =', lon
1865            #assert zone == refzone, msg
1866            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
1867            x[i] = easting
1868            y[i] = northing
1869            i += 1
1870
1871
1872    #Construct 2 triangles per 'rectangular' element
1873    volumes = []
1874    for l in range(number_of_longitudes-1):    #X direction
1875        for k in range(number_of_latitudes-1): #Y direction
1876            v1 = vertices[l,k+1]
1877            v2 = vertices[l,k]
1878            v3 = vertices[l+1,k+1]
1879            v4 = vertices[l+1,k]
1880
1881            volumes.append([v1,v2,v3]) #Upper element
1882            volumes.append([v4,v3,v2]) #Lower element
1883
1884    volumes = array(volumes)
1885
1886    if origin == None:
1887        zone = refzone
1888        xllcorner = min(x)
1889        yllcorner = min(y)
1890    else:
1891        zone = origin[0]
1892        xllcorner = origin[1]
1893        yllcorner = origin[2]
1894
1895
1896    outfile.xllcorner = xllcorner
1897    outfile.yllcorner = yllcorner
1898    outfile.zone = zone
1899
1900
1901    if elevation is not None:
1902        z = elevation
1903    else:
1904        if inverted_bathymetry:
1905            z = -1*elevations
1906        else:
1907            z = elevations
1908        #FIXME: z should be obtained from MOST and passed in here
1909
1910    from Numeric import resize
1911    z = resize(z,outfile.variables['z'][:].shape)
1912    outfile.variables['x'][:] = x - xllcorner
1913    outfile.variables['y'][:] = y - yllcorner
1914    outfile.variables['z'][:] = z
1915    outfile.variables['elevation'][:] = z  #FIXME HACK
1916    outfile.variables['time'][:] = times   #Store time relative
1917    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
1918
1919
1920
1921    #Time stepping
1922    stage = outfile.variables['stage']
1923    xmomentum = outfile.variables['xmomentum']
1924    ymomentum = outfile.variables['ymomentum']
1925
1926    if verbose: print 'Converting quantities'
1927    n = len(times)
1928    for j in range(n):
1929        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
1930        i = 0
1931        for k in range(number_of_latitudes):      #Y direction
1932            for l in range(number_of_longitudes): #X direction
1933                w = zscale*amplitudes[j,k,l]/100 + mean_stage
1934                stage[j,i] = w
1935                h = w - z[i]
1936                xmomentum[j,i] = uspeed[j,k,l]/100*h
1937                ymomentum[j,i] = vspeed[j,k,l]/100*h
1938                i += 1
1939
1940
1941    if verbose:
1942        x = outfile.variables['x'][:]
1943        y = outfile.variables['y'][:]
1944        print '------------------------------------------------'
1945        print 'Statistics of output file:'
1946        print '  Name: %s' %swwname
1947        print '  Reference:'
1948        print '    Lower left corner: [%f, %f]'\
1949              %(xllcorner, yllcorner)
1950        print '    Start time: %f' %starttime
1951        print '  Extent:'
1952        print '    x [m] in [%f, %f], len(x) == %d'\
1953              %(min(x.flat), max(x.flat), len(x.flat))
1954        print '    y [m] in [%f, %f], len(y) == %d'\
1955              %(min(y.flat), max(y.flat), len(y.flat))
1956        print '    t [s] in [%f, %f], len(t) == %d'\
1957              %(min(times), max(times), len(times))
1958        print '  Quantities [SI units]:'
1959        for name in ['stage', 'xmomentum', 'ymomentum']:
1960            q = outfile.variables[name][:].flferret2swwat
1961            print '    %s in [%f, %f]' %(name, min(q), max(q))
1962
1963
1964
1965
1966    outfile.close()
1967
1968
1969
1970
def timefile2swww(filename, quantity_names = None):
    """Template for converting typical text files with time series to
    NetCDF sww file.


    The file format is assumed to be two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname with extensions .txt and .sww

    quantity_names, if given, supplies names for the stored attributes;
    missing names fall back to Attribute0, Attribute1, ...
    """

    import time, calendar
    from Numeric import array, zeros, Float, alltrue
    from config import time_format
    from util import ensure_numeric


    #Read first line to establish start time and number of values
    fid = open(filename + '.txt')
    line = fid.readline()
    fid.close()

    fields = line.split(',')
    #FIX: filename was never interpolated, so assertion failures used to
    #show a literal '%s' in the message
    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
    assert len(fields) == 2, msg

    try:
        starttime = calendar.timegm(time.strptime(fields[0], time_format))
    except ValueError:
        msg = 'First field in file %s must be' %filename
        msg += ' date-time with format %s.\n' %time_format
        msg += 'I got %s instead.' %fields[0]
        raise msg


    #Split values of first line to determine number of attributes d
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg


    #Read times proper: one row per line, times stored relative to starttime
    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    N = len(lines)
    d = len(q)

    T = zeros(N, Float)       #Time
    Q = zeros((N, d), Float)  #Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        realtime = calendar.timegm(time.strptime(fields[0], time_format))

        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonuosly ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg


    #Create sww file
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(filename + '.sww', 'w')

    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'


    #Reference point
    #Start time in seconds since the epoch (midnight 1/1/1970)
    #FIXME: Use Georef
    fid.starttime = starttime


    # dimension definitions
    #fid.createDimension('number_of_volumes', self.number_of_volumes)
    #fid.createDimension('number_of_vertices', 3)


    fid.createDimension('number_of_timesteps', len(T))

    #if geo_reference is not None:
    #    geo_reference.write_NetCDF(fid)


    fid.createVariable('time', Float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        #FIX: catch only the expected failures (quantity_names is None ->
        #TypeError, or list too short -> IndexError) instead of bare except
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            name = 'Attribute%d'%i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()
2097
2098
def extent_sww(file_name):
    """
    Read in an sww file.

    Input;
    file_name - the sww file

    Output;
    A list [min(x), max(x), min(y), max(y), min(stage), max(stage)]
    giving the spatial extent and the stage range over all timesteps.
    """


    from Scientific.IO.NetCDF import NetCDFFile

    #Open NetCDF file for reading
    fid = NetCDFFile(file_name, 'r')

    #Pull out the coordinate and stage arrays
    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stages = fid.variables['stage'][:]

    fid.close()

    #Assemble extent; stage is 2D (time x points) so use the flat view
    extent = [min(xs), max(xs), min(ys), max(ys)]
    extent.append(min(stages.flat))
    extent.append(max(stages.flat))
    return extent
2133
2134
def sww2domain(filename,boundary=None,t=None,\
               fail_if_NaN=True,NaN_filler=0\
               ,verbose = True,very_verbose = False):
    """Reconstruct a shallow_water Domain from an sww file at time t.

    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    Conserved quantities are linearly interpolated in time to t;
    other (static) quantities are taken as stored.

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.

    If fail_if_NaN is False, entries matching the file's missing_value
    (or the default NaN below) are replaced by NaN_filler instead of
    raising.
    """
    #Default NetCDF missing_value used when the file supplies none
    NaN=9.969209968386869e+036
    #initialise NaN.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
    time = fid.variables['time']       #Timesteps
    if t is None:
        t = time[-1]
    #(index, ratio) pair used to interpolate all time-dependent quantities
    time_interp = get_time_interp(time,t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]             #x-coordinates of vertices
    y = fid.variables['y'][:]             #y-coordinates of vertices
    elevation = fid.variables['elevation']     #Elevation
    stage = fid.variables['stage']     #Water level
    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    coordinates=transpose(asarray([x.tolist(),y.tolist()]))

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    #Classify every stored variable: time-dependent ones are interpolated
    #to t, the rest are kept verbatim
    if verbose: print '    getting quantities'
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity]=\
                  interpolated_quantity(fid.variables[quantity][:],time_interp)
        else: other_quantities.append(quantity)

    #Coordinates, connectivity and the time axis are not quantities
    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
#    From domain.Domain:
#    domain = Domain(coordinates, volumes,\
#                    conserved_quantities = conserved_quantities,\
#                    other_quantities = other_quantities,zone=zone,\
#                    xllcorner=xllcorner, yllcorner=yllcorner)

#   From shallow_water.Domain:
    coordinates=coordinates.tolist()
    volumes=volumes.tolist()
    #FIXME:should this be in mesh?(peter row)
    #Smoothed files store one entry per unique point; unsmoothed files
    #store 3 entries per triangle, so duplicates must be weeded out
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)


    domain = Domain(coordinates, volumes, boundary)

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    #Model clock restarts at 0; absolute time is carried by starttime
    domain.starttime=float(starttime)+float(t)
    domain.time=0.0

    for quantity in other_quantities:
        try:
            #Prefer the file's own missing_value marker if present
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                #Replace missing entries by NaN_filler, keep the rest
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Reshape flat per-vertex data to (triangles, 3)
            X = resize(X,(len(X)/3,3))
        domain.set_quantity(quantity,X)
#
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            X = resize(X,(X.shape[0]/3,3))
        domain.set_quantity(quantity,X)
    fid.close()
    return domain
2277
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a stored quantity in time.

    time_interp is an (index, ratio) pair as produced by get_time_interp.
    Returns the values at timestep index when ratio is 0, otherwise the
    linear blend between timesteps index and index+1.
    """
    index, ratio = time_interp
    if ratio > 0:
        #Linear interpolation between the two bracketing timesteps
        return (1-ratio)*saved_quantity[index] + ratio*saved_quantity[index+1]
    else:
        #Exactly on a stored timestep - no interpolation needed
        return saved_quantity[index]
2289
def get_time_interp(time, t=None):
    """Find (index, ratio) for interpolating stored timesteps at time t.

    Returns index into time and the fractional position of t between
    time[index] and time[index+1]; ratio is 0 when t coincides with a
    stored timestep.  With t None the last timestep is selected.
    Borrowed from previous pyvolution code.
    """
    if t is None:
        #Default: last stored timestep, no interpolation
        return (-1, 0.)

    index = 0
    msg = 'Time interval derived from file %s [%s:%s]'\
        %('FIXMEfilename', time[0], time[-1])
    msg += ' does not match model time: %s' %t
    #t must lie within the stored time range
    if t < time[0]: raise msg
    if t > time[-1]: raise msg

    #Walk forward past t, then back to the bracketing index
    while t > time[index]: index += 1
    while t < time[index]: index -= 1

    if t == time[index]:
        #Protect against case where t == time[-1] (last time)
        # - also works in general when t == time[i]
        ratio = 0
    else:
        #t is now between index and index+1
        ratio = (t - time[index])/(time[index+1] - time[index])
    return (index, ratio)
2316
2317
def weed(coordinates, volumes, boundary = None):
    """Remove duplicate points from coordinates.

    Renumbers volumes (and boundary segments, if given) to reference the
    weeded point list.  Returns (coordinates, volumes, boundary).
    """
    #FIX: the original tested type(x) == 'array', comparing a type object
    #to a string - always False, so arrays were never converted.
    #Detect Numeric/numpy arrays by their tolist method instead.
    if hasattr(coordinates, 'tolist'):
        coordinates = coordinates.tolist()
    if hasattr(volumes, 'tolist'):
        volumes = volumes.tolist()

    #Map each original index to its point, keeping the first occurrence
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point not in point_dict:
            point_dict[point] = i
        same_point[i] = point
        #to change all point i references to point j

    #Rebuild the point list with consecutive indices over unique points
    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    #Remap volume vertex indices to the weeded numbering
    #(negative indices are sentinels and left untouched)
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    if boundary is not None:
        new_boundary = {}
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)

            if (point0, point1) in new_boundary:
                new_boundary[(point0, point1)] = new_boundary[(point0, point1)]#\
                                              #+','+label

            elif (point1, point0) in new_boundary:
                new_boundary[(point1, point0)] = new_boundary[(point1, point0)]#\
                                              #+','+label
            else:
                new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
2373
2374
2375
2376#OBSOLETE STUFF
2377#Native checkpoint format.
2378#Information needed to recreate a state is preserved
2379#FIXME: Rethink and maybe use netcdf format
def cpt_variable_writer(filename, t, v0, v1, v2):
    """Store all conserved quantities to file
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        #One line per row: N values of v0, then v1, then v2
        for v in (v0, v1, v2):
            for j in range(N):
                fid.write('%.16e ' %v[i,j])

        fid.write('\n')
    fid.close()
2400
2401
def cpt_variable_reader(filename, t, v0, v1, v2):
    """Read all conserved quantities back from a cpt checkpoint file.

    Fills the arrays v0, v1 and v2 (each of shape M x N) in place from
    the file produced by cpt_variable_writer for time t.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Reading from %s' %FN

    fid = open(FN)


    for i in range(M):
        values = fid.readline().split() #One line holds v0, v1 and v2 for row i

        for j in range(N):
            v0[i,j] = float(values[j])
            #FIX: offsets were hardcoded as 3+j and 6+j, which is only
            #correct when N == 3; use N so any width matches the layout
            #written by cpt_variable_writer
            v1[i,j] = float(values[N+j])
            v2[i,j] = float(values[2*N+j])

    fid.close()
2423
def cpt_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z,z,z coordinates of triangles constituting the bed
    elevation.
    Not in use pt
    """

    M, N = v0.shape

    # NOTE(review): debugging leftovers below - X0 is printed and the
    # process exits before anything is written.  Consistent with the
    # 'Not in use' remark above: this function is effectively dead code.
    print X0
    import sys; sys.exit()
    FN = create_filename(filename, 'cpt', M)
    print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        for j in range(2):
            fid.write('%.16e ' %X0[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v0[i,j])       #z,z,z,

        for j in range(2):
            fid.write('%.16e ' %X1[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v1[i,j])

        for j in range(2):
            fid.write('%.16e ' %X2[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v2[i,j])

        fid.write('\n')
    fid.close()
2456
2457
2458
2459#Function for storing out to e.g. visualisation
2460#FIXME: Do we want this?
2461#FIXME: Not done yet for this version
def dat_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z coordinates of triangles constituting the bed elevation.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        #For each of the three vertices: x, y then the z value
        for X, v in ((X0, v0), (X1, v1), (X2, v2)):
            fid.write('%f ' %X[i,0])   #x
            fid.write('%f ' %X[i,1])   #y
            fid.write('%f ' %v[i,0])   #z

        fid.write('\n')
    fid.close()
2487
2488
2489
def dat_variable_writer(filename, t, v0, v1, v2):
    """Store water height to file
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M, t)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        #One row per triangle: first vertex value of each quantity
        fid.write('%.4f %.4f %.4f \n' %(v0[i,0], v1[i,0], v2[i,0]))
    fid.close()
2507
2508
2509def read_sww(filename):
2510    """Read sww Net CDF file containing Shallow Water Wave simulation
2511
2512    The integer array volumes is of shape Nx3 where N is the number of
2513    triangles in the mesh.
2514
2515    Each entry in volumes is an index into the x,y arrays (the location).
2516
2517    Quantities stage, elevation, xmomentum and ymomentum are all in arrays of dimensions
2518    number_of_timesteps, number_of_points.
2519
2520    The momentum is not always stored.
2521
2522    """
2523    from Scientific.IO.NetCDF import NetCDFFile
2524    print 'Reading from ', filename
2525    fid = NetCDFFile(filename, 'r')    #Open existing file for read
2526#latitude, longitude
2527    # Get the variables as Numeric arrays
2528    x = fid.variables['x']             #x-coordinates of vertices
2529    y = fid.variables['y']             #y-coordinates of vertices
2530    z = fid.variables['elevation']     #Elevation
2531    time = fid.variables['time']       #Timesteps
2532    stage = fid.variables['stage']     #Water level
2533    #xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
2534    #ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
2535
2536    volumes = fid.variables['volumes'] #Connectivity
2537
2538
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.

    stencil is a 2D Numeric array of weights; each output cell is the
    weighted sum of the stencil-sized window of input cells at the
    corresponding position.  cellsize_new is assumed to be an integer
    multiple of the input cellsize.  Output is written to
    basename_out + '.dem', or basename_in + '_<cellsize_new>.dem'
    when basename_out is None.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out == None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    #(integer division here relies on Python 2 semantics)
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                           'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    #Shift the corner by the stencil's half-width so output cell centres
    #line up with the centres of the stencil windows
    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    #View the flat elevation data as a (nrows, ncols) grid
    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev =  zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        #Write one decimated row at a time to the output variable
        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
2665
Note: See TracBrowser for help on using the repository browser.