source: inundation/pyvolution/data_manager.py @ 1911

Last change on this file since 1911 was 1911, checked in by ole, 19 years ago

Refactored pyvolution to use polygon functionality from new utilities package

File size: 93.8 KB
Line 
1"""datamanager.py - input output for AnuGA
2
3
This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
NC -> SWW: Conversion of MOST boundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
TSH + Boundary SWW -> SWW: Simulation using pyvolution
48
49"""
50
51from Numeric import concatenate
52
53from coordinate_transforms.geo_reference import Geo_reference
54
def make_filename(s):
    """Transform argument string into a suitable filename

    Strips surrounding whitespace, turns spaces into underscores,
    removes parentheses and collapses any resulting double underscore.
    """

    cleaned = s.strip().replace(' ', '_')
    for ch in ('(', ')'):
        cleaned = cleaned.replace(ch, '')

    return cleaned.replace('__', '_')
66
67
def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    If the directory cannot be created, a platform-dependent fallback
    ('/tmp/' or 'C:') is returned instead.
    """

    import os, sys
    import os.path

    # Windows-style platforms need a different fallback directory and
    # have no chmod equivalent here
    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1

    # Guard against empty path before indexing path[-1]
    if path and path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname

    # NOTE: the original used os.access(path, os.R_OK and os.W_OK),
    # which evaluates to just os.W_OK; both permission bits must be
    # combined with bitwise OR.
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            os.mkdir(path)

            # Change access rights if possible
            if unix:
                os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print('MESSAGE: Directory %s created.' % path)

        except OSError:
            # Creation failed (permissions, bad path, ...): fall back
            # to a directory that is likely to be writable
            print('WARNING: Directory %s could not be created.' % path)
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print('Using directory %s instead' % path)

    return path
120
121
122
def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for entry in os.listdir(path):
            X = os.path.join(path, entry)

            if os.path.isdir(X) and not os.path.islink(X):
                # Recurse into real subdirectories (but not through
                # symlinks, which are removed as plain entries)
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except OSError:
                    # Report but keep going so remaining entries are removed
                    print('Could not remove file %s' % X)

        os.rmdir(path)
143
144
145
def create_filename(datadir, filename, format, size=None, time=None):
    """Build a full output path inside datadir.

    Optional size and time are encoded into the name; the extension
    is taken from format. The data directory is verified/created via
    check_dir, which returns it with a trailing separator.
    """

    import os
    #from config import data_dir

    name = check_dir(datadir) + filename

    if size is not None:
        name = name + '_size%d' %size

    if time is not None:
        name = name + '_time%.2f' %time

    return name + '.' + format
161
162
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from config import data_dir

    directory = check_dir(datadir)

    # check_dir() already returns the path with a trailing separator,
    # so no extra os.sep is inserted here (the original added a
    # duplicate). The size tag must match create_filename(), which
    # writes '_size%d' (the original pattern used '_size=%d' and so
    # never matched any generated file).
    pattern = directory + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)
176
177
178
#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        # Reject anything but read/write/append up front
        valid_modes = ['r', 'w', 'a']
        assert mode in valid_modes, '''Mode %s must be either:''' %mode +\
                                    '''   'w' (write)'''+\
                                    '''   'r' (read)''' +\
                                    '''   'a' (append)'''

        #Create filename
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))

        # Derive the output filename from the domain's data directory,
        # simulation name and the format-specific extension
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.timestep = 0                     # Next timestep index to write
        self.number_of_volumes = len(domain)  # Triangle count of the mesh
        self.domain = domain


        #FIXME: Should we have a general set_precision function?
206
207
208
#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        """Create (mode 'w') or open the .sww file for domain.

        domain    -- simulation domain; must supply smooth, default_order,
                     starttime, geo_reference and quantities
        mode      -- 'w' (write), 'r' (read) or 'a' (append)
        max_size  -- file size in bytes at which output is continued
                     in a new file (see store_timestep)
        recursion -- internal split counter used when a new file is
                     started; False for a top-level writer
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        # A domain-supplied max_size overrides the argument
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            # With smoothing, shared vertices are stored once; otherwise
            # each triangle contributes its own three points
            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility - elevation is duplicated in 'z'
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file

        names -- name or list of names of quantities to store
                 ('stage', 'xmomentum' and/or 'ymomentum')

        If appending would push the file past max_size, output is
        continued in a new file and domain.writer is repointed to it.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        minimum_allowed_depth = 0.001
        #minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF. Retry for a while in case another process
        #(e.g. a viewer) has the file open.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            #FIXME(review): raising a string; should be an exception instance
            raise msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]   #st_size in bytes
        file_size_increase =  file_size/i    #average size per stored timestep
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domian.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break swollens reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    # Clamp stage to the bed where the depth is below
                    # minimum_allowed_depth (dry cells)
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()
479
480
481
#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt) for checkpoint data

    Stores the mesh once (store_checkpoint) and one variable per
    quantity per timestep (store_timestep).
    """


    def __init__(self, domain, mode = 'w'):
        """Create (mode 'w') or open the checkpoint NetCDF file for domain."""
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        #FIXME: the extension should probably be 'cpt' (per the class
        #docstring), but 'sww' is kept for backwards compatibility with
        #existing output files.
        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y vertex coordinates and connectivity (volumes)
        of the mesh to the checkpoint file.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)

        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        # NOTE: Z is not stored - the checkpoint file defines no
        # elevation variable (the original wrote to an undefined name
        # 'z' here, raising a NameError on every call).

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and the named quantity to file

        name -- key into domain.quantities; a variable of the same name
                must exist in the file (created in __init__).
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF. Retry for a while in case another process is
        #currently reading the file.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                          %self.filename
                msg += ' - trying again'
                print(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        #Only give up after all retries are exhausted (the original
        #raised unconditionally inside the loop due to misindentation).
        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise Exception(msg)


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        #Use the variable matching the requested quantity (the original
        #hardcoded 'stage', which fails for any other quantity name).
        var = fid.variables[name]
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision = self.precision)

        var[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()
627
628
629
630
631
#Function for storing xya output
#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface to ascii xya format

    store_all writes one line per vertex:
    x, y, bed elevation, stage, friction.
    """


    def __init__(self, domain, mode = 'w'):
        # Only sets precision and the output filename (via Data_format);
        # no file is created until store_all is called.
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        # Number of distinct points depends on whether vertices are
        # shared (smooth) or duplicated per triangle
        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        # Header: <number of points> <number of attributes> plus legend
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        # NOTE: map(None, ...) is the Python 2 idiom for zipping
        # sequences (padding with None if lengths differ)
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file

        Not implemented for the xya format (see FIXME on the class).
        """
        pass
703
704
#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number elements.
       triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    # Ensure the output name carries exactly one .obj extension
    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'

    # Text mode: obj is a plain-text format (the original opened the
    # file in binary mode 'wb' while writing text strings)
    outfile = open(FN, 'w')
    try:
        outfile.write("# Triangulation as an obj file\n")

        M, N = x.shape
        assert N == 3  #Assuming three vertices per element

        # One 'v' line per vertex of each (disconnected) triangle
        for i in range(M):
            for j in range(N):
                outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

        # Faces reference the vertices in the order written above
        # (obj indices are 1-based)
        for i in range(M):
            base = i*N
            outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))
    finally:
        # Close even if the shape assertion fails
        outfile.close()
743
744
745#########################################################
746#Conversion routines
747########################################################
748
749def sww2obj(basefilename, size):
750    """Convert netcdf based data output to obj
751    """
752    from Scientific.IO.NetCDF import NetCDFFile
753
754    from Numeric import Float, zeros
755
756    #Get NetCDF
757    FN = create_filename('.', basefilename, 'sww', size)
758    print 'Reading from ', FN
759    fid = NetCDFFile(FN, 'r')  #Open existing file for read
760
761
762    # Get the variables
763    x = fid.variables['x']
764    y = fid.variables['y']
765    z = fid.variables['elevation']
766    time = fid.variables['time']
767    stage = fid.variables['stage']
768
769    M = size  #Number of lines
770    xx = zeros((M,3), Float)
771    yy = zeros((M,3), Float)
772    zz = zeros((M,3), Float)
773
774    for i in range(M):
775        for j in range(3):
776            xx[i,j] = x[i+j*M]
777            yy[i,j] = y[i+j*M]
778            zz[i,j] = z[i+j*M]
779
780    #Write obj for bathymetry
781    FN = create_filename('.', basefilename, 'obj', size)
782    write_obj(FN,xx,yy,zz)
783
784
785    #Now read all the data with variable information, combine with
786    #x,y info and store as obj
787
788    for k in range(len(time)):
789        t = time[k]
790        print 'Processing timestep %f' %t
791
792        for i in range(M):
793            for j in range(3):
794                zz[i,j] = stage[k,i+j*M]
795
796
797        #Write obj for variable data
798        #FN = create_filename(basefilename, 'obj', size, time=t)
799        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
800        write_obj(FN,xx,yy,zz)
801
802
803def dat2obj(basefilename):
804    """Convert line based data output to obj
805    FIXME: Obsolete?
806    """
807
808    import glob, os
809    from config import data_dir
810
811
812    #Get bathymetry and x,y's
813    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()
814
815    from Numeric import zeros, Float
816
817    M = len(lines)  #Number of lines
818    x = zeros((M,3), Float)
819    y = zeros((M,3), Float)
820    z = zeros((M,3), Float)
821
822    ##i = 0
823    for i, line in enumerate(lines):
824        tokens = line.split()
825        values = map(float,tokens)
826
827        for j in range(3):
828            x[i,j] = values[j*3]
829            y[i,j] = values[j*3+1]
830            z[i,j] = values[j*3+2]
831
832        ##i += 1
833
834
835    #Write obj for bathymetry
836    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)
837
838
839    #Now read all the data files with variable information, combine with
840    #x,y info
841    #and store as obj
842
843    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')
844
845    for filename in files:
846        print 'Processing %s' % filename
847
848        lines = open(data_dir+os.sep+filename,'r').readlines()
849        assert len(lines) == M
850        root, ext = os.path.splitext(filename)
851
852        #Get time from filename
853        i0 = filename.find('_time=')
854        if i0 == -1:
855            #Skip bathymetry file
856            continue
857
858        i0 += 6  #Position where time starts
859        i1 = filename.find('.dat')
860
861        if i1 > i0:
862            t = float(filename[i0:i1])
863        else:
864            raise 'Hmmmm'
865
866
867
868        ##i = 0
869        for i, line in enumerate(lines):
870            tokens = line.split()
871            values = map(float,tokens)
872
873            for j in range(3):
874                z[i,j] = values[j]
875
876            ##i += 1
877
878        #Write obj for variable data
879        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
880
881
882def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
883    """Read netcdf filename1, pick timesteps first:step:last and save to
884    nettcdf file filename2
885    """
886    from Scientific.IO.NetCDF import NetCDFFile
887
888    #Get NetCDF
889    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
890    outfile = NetCDFFile(filename2, 'w')  #Open new file
891
892
893    #Copy dimensions
894    for d in infile.dimensions:
895        outfile.createDimension(d, infile.dimensions[d])
896
897    for name in infile.variables:
898        var = infile.variables[name]
899        outfile.createVariable(name, var.typecode(), var.dimensions)
900
901
902    #Copy the static variables
903    for name in infile.variables:
904        if name == 'time' or name == 'stage':
905            pass
906        else:
907            #Copy
908            outfile.variables[name][:] = infile.variables[name][:]
909
910    #Copy selected timesteps
911    time = infile.variables['time']
912    stage = infile.variables['stage']
913
914    newtime = outfile.variables['time']
915    newstage = outfile.variables['stage']
916
917    if last is None:
918        last = len(time)
919
920    selection = range(first, last, step)
921    for i, j in enumerate(selection):
922        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
923        newtime[i] = time[j]
924        newstage[i,:] = stage[j,:]
925
926    #Close
927    infile.close()
928    outfile.close()
929
930
#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename

    domain.format selects the writer class, e.g. format 'sww' maps to
    Data_format_sww. Raises KeyError if no such class exists.
    """

    # Look the class up by name in module globals instead of eval
    cls = globals()['Data_format_%s' % domain.format]
    return cls(domain, mode)
938
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Open existing xya file for read
    infile = open(root + ext, 'r')

    if verbose: print('Reading xya points from %s' %(root + ext))

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        #Honour subsampling: keep only every stride'th line
        if i % stride != 0: continue

        fields = line.split()

        if len(fields) != 3:
            #Warn, but still attempt the parse below (headers and other
            #non-numeric lines are skipped there)
            print('WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line))

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except (ValueError, IndexError):
            #Not three numeric values - skip this line
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)

    infile.close()

    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print('Store to NetCDF file %s' %ptsname)
    write_ptsfile(ptsname, points, attribute, attribute_name)
1014
1015
def write_ptsfile(ptsname, points, attribute, attribute_name = None):
    """Write points and one associated attribute to pts (NetCDF) format.

    ptsname: name of output file (NetCDF pts format)
    points: sequence of (x, y) pairs
    attribute: sequence of scalar values, one per point
    attribute_name: name under which the attribute is stored;
                    defaults to 'attribute'
    """

    from Numeric import Float
    from Scientific.IO.NetCDF import NetCDFFile

    if attribute_name is None:
        attribute_name = 'attribute'

    #Open a fresh NetCDF file and describe its contents
    fid = NetCDFFile(ptsname, 'w')
    fid.institution = 'Geoscience Australia'
    fid.description = 'NetCDF pts format for compact and portable storage of spatial point data'

    #Attach default georeferencing information
    from coordinate_transforms.geo_reference import Geo_reference
    Geo_reference().write_NetCDF(fid)

    #Dimensions: N points, each with 2 coordinates
    fid.createDimension('number_of_points', len(points))
    fid.createDimension('number_of_dimensions', 2) #This is 2d data

    #Declare variables and store the data
    fid.createVariable('points', Float, ('number_of_points',
                                         'number_of_dimensions'))
    fid.createVariable(attribute_name, Float, ('number_of_points',))

    point_var = fid.variables['points']
    attribute_var = fid.variables[attribute_name]

    point_var[:, :] = points
    attribute_var[:] = attribute

    fid.close()
1060   
1061
def dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    basename_in: stem of input .dem file (NetCDF)
    basename_out: stem of output .pts file; defaults to basename_in
    verbose: print progress information
    easting_min/easting_max/northing_min/northing_max: optional absolute
        UTM bounds used to clip the grid; default to the full DEM extent.
        Points in the output are stored relative to (easting_min, northing_min),
        which becomes the new georeference origin.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    #Grid metadata stored as single-element attribute arrays
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    #Georeferencing metadata
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values (full DEM extent) for any bound not supplied
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing: the clipped lower-left corner becomes the new origin
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows


    # dimension definitions: size of the clipped grid in whole cells
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
    outfile.createDimension('number_of_points', nrows_in_bounding_box*ncols_in_bounding_box)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    #Elevation is stored flat in the .dem file; view it as a 2D grid
    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    #FIXME: Could perhaps be faster using array operations (Fixed 27/7/05)
    global_index = 0
    for i in range(nrows):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)       
       
        lower_index = global_index
        #Per-row staging buffers, copied to the NetCDF variables in one slice
        tpoints = zeros((ncols_in_bounding_box, 2), Float)
        telev =  zeros(ncols_in_bounding_box, Float)
        local_index = 0

        #NOTE(review): row 0 maps to y = nrows*cellsize + yllcorner, i.e. one
        #full cell above yllcorner + (nrows-1)*cellsize — confirm this matches
        #the intended cell-registration convention of the .dem grid
        y = (nrows-i)*cellsize + yllcorner
        for j in range(ncols):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                #Store coordinates relative to the new (clipped) origin
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        #NOTE(review): a row is written only when it contributed exactly
        #ncols_in_bounding_box points; rows partially inside the bounds are
        #counted in global_index but never written, which would leave gaps and
        #trip the assert below — presumably bounds are expected to be
        #cell-aligned; verify with callers
        if upper_index == lower_index + ncols_in_bounding_box:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nrows_in_bounding_box*ncols_in_bounding_box, 'index not equal to number of points'

    infile.close()
    outfile.close()
1205
1206
1207
1208def sww2dem(basename_in, basename_out = None,
1209            quantity = None,
1210            timestep = None,
1211            reduction = None,
1212            cellsize = 10,
1213            easting_min = None,
1214            easting_max = None,
1215            northing_min = None,
1216            northing_max = None,         
1217            verbose = False,
1218            origin = None,
1219            datum = 'WGS84',
1220            format = 'ers'):
1221   
1222    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1223
1224    Example (ASC):
1225
1226    ncols         3121
1227    nrows         1800
1228    xllcorner     722000
1229    yllcorner     5893000
1230    cellsize      25
1231    NODATA_value  -9999
1232    138.3698 137.4194 136.5062 135.5558 ..........
1233
1234    Also write accompanying file with same basename_in but extension .prj
1235    used to fix the UTM zone, datum, false northings and eastings.
1236
1237    The prj format is assumed to be as
1238
1239    Projection    UTM
1240    Zone          56
1241    Datum         WGS84
1242    Zunits        NO
1243    Units         METERS
1244    Spheroid      WGS84
1245    Xshift        0.0000000000
1246    Yshift        10000000.0000000000
1247    Parameters
1248
1249
1250    if quantity is given, out values from quantity otherwise default to
1251    elevation
1252
1253    if timestep (an index) is given, output quantity at that timestep
1254
1255    if reduction is given use that to reduce quantity over all timesteps.
1256
1257    datum
1258   
1259    format can be either 'asc' or 'ers'
1260    """
1261
1262    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1263    from utilities.polygon import inside_polygon
1264   
1265    msg = 'Format must be either asc or ers'
1266    assert format.lower() in ['asc', 'ers'], msg
1267   
1268    false_easting = 500000
1269    false_northing = 10000000
1270
1271    if quantity is None:
1272        quantity = 'elevation'
1273
1274    if reduction is None:
1275        reduction = max
1276
1277    if basename_out is None:
1278        basename_out = basename_in + '_%s' %quantity
1279 
1280    swwfile = basename_in + '.sww'
1281    demfile = basename_out + '.' + format
1282    # Note the use of a .ers extension is optional (write_ermapper_grid will
1283    # deal with either option
1284
1285    #Read sww file
1286    if verbose: print 'Reading from %s' %swwfile
1287    from Scientific.IO.NetCDF import NetCDFFile
1288    fid = NetCDFFile(swwfile)
1289
1290    #Get extent and reference
1291    x = fid.variables['x'][:]
1292    y = fid.variables['y'][:]
1293    volumes = fid.variables['volumes'][:]
1294
1295    number_of_timesteps = fid.dimensions['number_of_timesteps']
1296    number_of_points = fid.dimensions['number_of_points']
1297    if origin is None:
1298
1299        #Get geo_reference
1300        #sww files don't have to have a geo_ref
1301        try:
1302            geo_reference = Geo_reference(NetCDFObject=fid)
1303        except AttributeError, e:
1304            geo_reference = Geo_reference() #Default georef object
1305
1306        xllcorner = geo_reference.get_xllcorner()
1307        yllcorner = geo_reference.get_yllcorner()
1308        zone = geo_reference.get_zone()
1309    else:
1310        zone = origin[0]
1311        xllcorner = origin[1]
1312        yllcorner = origin[2]
1313
1314
1315
1316    #Get quantity and reduce if applicable
1317    if verbose: print 'Reading quantity %s' %quantity
1318
1319    if quantity.lower() == 'depth':
1320        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
1321    else:
1322        q = fid.variables[quantity][:]
1323
1324
1325    if len(q.shape) == 2:
1326        #q has a time component and needs to be reduced along
1327        #the temporal dimension
1328        if verbose: print 'Reducing quantity %s' %quantity
1329        q_reduced = zeros( number_of_points, Float )
1330
1331        for k in range(number_of_points):
1332            q_reduced[k] = reduction( q[:,k] )
1333
1334        q = q_reduced
1335
1336    #Post condition: Now q has dimension: number_of_points
1337    assert len(q.shape) == 1
1338    assert q.shape[0] == number_of_points   
1339   
1340
1341    #Create grid and update xll/yll corner and x,y
1342
1343    #Relative extent
1344    if easting_min is None:
1345        xmin = min(x)
1346    else:
1347        xmin = easting_min - xllcorner
1348
1349    if easting_max is None:
1350        xmax = max(x)
1351    else:
1352        xmax = easting_max - xllcorner
1353       
1354    if northing_min is None:       
1355        ymin = min(y)
1356    else:
1357        ymin = northing_min - yllcorner
1358       
1359    if northing_max is None:       
1360        ymax = max(y)
1361    else:
1362        ymax = northing_max - yllcorner
1363       
1364       
1365   
1366    if verbose: print 'Creating grid'
1367    ncols = int((xmax-xmin)/cellsize)+1
1368    nrows = int((ymax-ymin)/cellsize)+1
1369
1370
1371    #New absolute reference and coordinates
1372    newxllcorner = xmin+xllcorner
1373    newyllcorner = ymin+yllcorner
1374
1375    x = x+xllcorner-newxllcorner
1376    y = y+yllcorner-newyllcorner
1377
1378    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1379    assert len(vertex_points.shape) == 2
1380
1381
1382    from Numeric import zeros, Float
1383    grid_points = zeros ( (ncols*nrows, 2), Float )
1384
1385
1386    for i in xrange(nrows):
1387        if format.lower() == 'asc':
1388            yg = i*cellsize
1389        else:   
1390            #this will flip the order of the y values for ers
1391            yg = (nrows-i)*cellsize
1392           
1393        for j in xrange(ncols):
1394            xg = j*cellsize
1395            k = i*ncols + j
1396
1397            grid_points[k,0] = xg
1398            grid_points[k,1] = yg
1399
1400    #Interpolate
1401    from least_squares import Interpolation
1402   
1403
1404    #FIXME: This should be done with precrop = True (?), otherwise it'll
1405    #take forever. With expand_search  set to False, some grid points might
1406    #miss out.... This will be addressed though Duncan's refactoring of least_squares
1407
1408    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
1409                           precrop = False, expand_search = False,
1410                           verbose = verbose)
1411
1412    #Interpolate using quantity values
1413    if verbose: print 'Interpolating'
1414    grid_values = interp.interpolate(q).flat
1415
1416    if format.lower() == 'ers':
1417        # setup ERS header information
1418        grid_values = reshape(grid_values,(nrows, ncols))   
1419        NODATA_value = 0
1420        header = {}
1421        header['datum'] = '"' + datum + '"'
1422        # FIXME The use of hardwired UTM and zone number needs to be made optional
1423        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
1424        header['projection'] = '"UTM-' + str(zone) + '"'
1425        header['coordinatetype'] = 'EN'
1426        if header['coordinatetype'] == 'LL':
1427            header['longitude'] = str(newxllcorner)
1428            header['latitude'] = str(newyllcorner)
1429        elif header['coordinatetype'] == 'EN':
1430            header['eastings'] = str(newxllcorner)
1431            header['northings'] = str(newyllcorner)
1432        header['nullcellvalue'] = str(NODATA_value)
1433        header['xdimension'] = str(cellsize)
1434        header['ydimension'] = str(cellsize)
1435        header['value'] = '"' + quantity + '"'
1436
1437
1438        #Write
1439        if verbose: print 'Writing %s' %demfile
1440        import ermapper_grids
1441        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
1442
1443        fid.close()   
1444    else:
1445        #Write to Ascii format
1446       
1447        #Write prj file
1448        prjfile = basename_out + '.prj' 
1449       
1450        if verbose: print 'Writing %s' %prjfile
1451        prjid = open(prjfile, 'w')
1452        prjid.write('Projection    %s\n' %'UTM')
1453        prjid.write('Zone          %d\n' %zone)
1454        prjid.write('Datum         %s\n' %datum)
1455        prjid.write('Zunits        NO\n')
1456        prjid.write('Units         METERS\n')
1457        prjid.write('Spheroid      %s\n' %datum)
1458        prjid.write('Xshift        %d\n' %false_easting)
1459        prjid.write('Yshift        %d\n' %false_northing)
1460        prjid.write('Parameters\n')
1461        prjid.close()
1462
1463
1464   
1465        if verbose: print 'Writing %s' %ascfile
1466        NODATA_value = -9999
1467 
1468        ascid = open(demfile, 'w')
1469
1470        ascid.write('ncols         %d\n' %ncols)
1471        ascid.write('nrows         %d\n' %nrows)
1472        ascid.write('xllcorner     %d\n' %newxllcorner)
1473        ascid.write('yllcorner     %d\n' %newyllcorner)
1474        ascid.write('cellsize      %f\n' %cellsize)
1475        ascid.write('NODATA_value  %d\n' %NODATA_value)
1476
1477
1478        #Get bounding polygon from mesh
1479        P = interp.mesh.get_boundary_polygon()
1480        inside_indices = inside_polygon(grid_points, P)
1481
1482        for i in range(nrows):
1483            if verbose and i%((nrows+10)/10)==0:
1484                print 'Doing row %d of %d' %(i, nrows)
1485
1486            for j in range(ncols):
1487                index = (nrows-i-1)*ncols+j
1488
1489                if sometrue(inside_indices == index):
1490                    ascid.write('%f ' %grid_values[index])
1491                else:
1492                    ascid.write('%d ' %NODATA_value)
1493
1494            ascid.write('\n')
1495
1496        #Close
1497        ascid.close()
1498        fid.close()
1499
1500#Backwards compatibility       
1501def sww2asc(basename_in, basename_out = None,
1502            quantity = None,
1503            timestep = None,
1504            reduction = None,
1505            cellsize = 10,
1506            verbose = False,
1507            origin = None):
1508    print 'sww2asc will soon be obsoleted - please use sww2dem'
1509    sww2dem(basename_in,
1510            basename_out = basename_out,
1511            quantity = quantity,
1512            timestep = timestep,
1513            reduction = reduction,
1514            cellsize = cellsize,
1515            verbose = verbose,
1516            origin = origin,
1517            datum = 'WGS84',
1518            format = 'asc')
1519           
1520def sww2ers(basename_in, basename_out = None,
1521            quantity = None,
1522            timestep = None,
1523            reduction = None,
1524            cellsize = 10,
1525            verbose = False,
1526            origin = None,
1527            datum = 'WGS84'):
1528    print 'sww2ers will soon be obsoleted - please use sww2dem'
1529    sww2dem(basename_in, 
1530            basename_out = basename_out,
1531            quantity = quantity,
1532            timestep = timestep,
1533            reduction = reduction,
1534            cellsize = cellsize,
1535            verbose = verbose,
1536            origin = origin,
1537            datum = datum,
1538            format = 'ers')         
1539################################# END COMPATIBILITY ##############         
1540
1541
1542
1543
def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
                                  verbose=False):
    """Read Digitial Elevation model from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert basename_in + '.asc' to NetCDF format (.dem)
    mimicking the ASCII format closely.


    An accompanying file with same basename_in but extension .prj must exist
    and is used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters

    basename_in: stem of input .asc/.prj files
    basename_out: stem of output .dem file; defaults to basename_in
    verbose: print progress information

    Raises AssertionError if the .prj keywords are not in the exact
    order shown above, or if the .asc file does not have exactly
    nrows data lines after its 6 header lines.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, array

    #root, ext = os.path.splitext(basename_in)
    root = basename_in

    ###########################################
    # Read Meta data
    # (Note: %-formatting binds before +, so this prints
    # 'Reading METADATA from <root>.prj' as intended)
    if verbose: print 'Reading METADATA from %s' %root + '.prj'
    metadatafile = open(root + '.prj')
    metalines = metadatafile.readlines()
    metadatafile.close()

    #The .prj keywords are expected in this fixed order; each assert
    #guards one line
    L = metalines[0].strip().split()
    assert L[0].strip().lower() == 'projection'
    projection = L[1].strip()                   #TEXT

    L = metalines[1].strip().split()
    assert L[0].strip().lower() == 'zone'
    zone = int(L[1].strip())

    L = metalines[2].strip().split()
    assert L[0].strip().lower() == 'datum'
    datum = L[1].strip()                        #TEXT

    L = metalines[3].strip().split()
    assert L[0].strip().lower() == 'zunits'     #IGNORE
    zunits = L[1].strip()                       #TEXT

    L = metalines[4].strip().split()
    assert L[0].strip().lower() == 'units'
    units = L[1].strip()                        #TEXT

    L = metalines[5].strip().split()
    assert L[0].strip().lower() == 'spheroid'   #IGNORE
    spheroid = L[1].strip()                     #TEXT

    L = metalines[6].strip().split()
    assert L[0].strip().lower() == 'xshift'
    false_easting = float(L[1].strip())

    L = metalines[7].strip().split()
    assert L[0].strip().lower() == 'yshift'
    false_northing = float(L[1].strip())

    #print false_easting, false_northing, zone, datum


    ###########################################
    #Read DEM data

    datafile = open(basename_in + '.asc')

    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), ' lines'

    #Standard 6-line ArcView ASCII grid header
    ncols = int(lines[0].split()[1].strip())
    nrows = int(lines[1].split()[1].strip())
    xllcorner = float(lines[2].split()[1].strip())
    yllcorner = float(lines[3].split()[1].strip())
    cellsize = float(lines[4].split()[1].strip())
    #NOTE(review): parsed with int() — a .asc file with a non-integer
    #NODATA_value (e.g. -9999.0) would raise ValueError here; confirm
    #inputs always use integer NODATA
    NODATA_value = int(lines[5].split()[1].strip())

    #Header (6 lines) plus one line per row and nothing else
    assert len(lines) == nrows + 6


    ##########################################
    #Write NetCDF .dem file

    if basename_out == None:
        netcdfname = root + '.dem'
    else:
        netcdfname = basename_out + '.dem'

    if verbose: print 'Store to NetCDF file %s' %netcdfname
    # NetCDF file definition
    fid = NetCDFFile(netcdfname, 'w')

    #Create new file
    fid.institution = 'Geoscience Australia'
    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
                      'of spatial point data'

    #Copy grid metadata through as global attributes
    fid.ncols = ncols
    fid.nrows = nrows
    fid.xllcorner = xllcorner
    fid.yllcorner = yllcorner
    fid.cellsize = cellsize
    fid.NODATA_value = NODATA_value

    #Georeferencing from the .prj file
    fid.zone = zone
    fid.false_easting = false_easting
    fid.false_northing = false_northing
    fid.projection = projection
    fid.datum = datum
    fid.units = units


    # dimension definitions
    fid.createDimension('number_of_rows', nrows)
    fid.createDimension('number_of_columns', ncols)

    # variable definitions
    fid.createVariable('elevation', Float, ('number_of_rows',
                                            'number_of_columns'))

    # Get handles to the variables
    elevation = fid.variables['elevation']

    #Store data, one .asc data line per grid row
    n = len(lines[6:])
    for i, line in enumerate(lines[6:]):
        fields = line.split()
        if verbose and i%((n+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)           

        elevation[i, :] = array([float(x) for x in fields])

    fid.close()
1701
1702
1703
1704
1705
1706def ferret2sww(basename_in, basename_out = None,
1707               verbose = False,
1708               minlat = None, maxlat = None,
1709               minlon = None, maxlon = None,
1710               mint = None, maxt = None, mean_stage = 0,
1711               origin = None, zscale = 1,
1712               fail_on_NaN = True,
1713               NaN_filler = 0,
1714               elevation = None,
1715               inverted_bathymetry = False
1716               ): #FIXME: Bathymetry should be obtained
1717                                  #from MOST somehow.
1718                                  #Alternatively from elsewhere
1719                                  #or, as a last resort,
1720                                  #specified here.
1721                                  #The value of -100 will work
1722                                  #for the Wollongong tsunami
1723                                  #scenario but is very hacky
1724    """Convert 'Ferret' NetCDF format for wave propagation to
1725    sww format native to pyvolution.
1726
1727    Specify only basename_in and read files of the form
1728    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
1729    relative height, x-velocity and y-velocity, respectively.
1730
1731    Also convert latitude and longitude to UTM. All coordinates are
1732    assumed to be given in the GDA94 datum.
1733
1734    min's and max's: If omitted - full extend is used.
1735    To include a value min may equal it, while max must exceed it.
1736    Lat and lon are assuemd to be in decimal degrees
1737
1738    origin is a 3-tuple with geo referenced
1739    UTM coordinates (zone, easting, northing)
1740
1741    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
1742    which means that longitude is the fastest
1743    varying dimension (row major order, so to speak)
1744
1745    ferret2sww uses grid points as vertices in a triangular grid
1746    counting vertices from lower left corner upwards, then right
1747    """
1748
1749    import os
1750    from Scientific.IO.NetCDF import NetCDFFile
1751    from Numeric import Float, Int, Int32, searchsorted, zeros, array
1752    from Numeric import allclose, around
1753       
1754    precision = Float
1755
1756
1757    #Get NetCDF data
1758    if verbose: print 'Reading files %s_*.nc' %basename_in
1759    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
1760    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
1761    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
1762    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
1763
1764    if basename_out is None:
1765        swwname = basename_in + '.sww'
1766    else:
1767        swwname = basename_out + '.sww'
1768
1769    times = file_h.variables['TIME']
1770    latitudes = file_h.variables['LAT']
1771    longitudes = file_h.variables['LON']
1772
1773
1774
1775    #Precision used by most for lat/lon is 4 or 5 decimals
1776    e_lat = around(file_e.variables['LAT'][:], 5)
1777    e_lon = around(file_e.variables['LON'][:], 5)
1778
1779    #Check that files are compatible
1780    assert allclose(latitudes, file_u.variables['LAT'])
1781    assert allclose(latitudes, file_v.variables['LAT'])
1782    assert allclose(latitudes, e_lat)
1783
1784    assert allclose(longitudes, file_u.variables['LON'])
1785    assert allclose(longitudes, file_v.variables['LON'])
1786    assert allclose(longitudes, e_lon)   
1787
1788
1789
1790    if mint == None:
1791        jmin = 0
1792    else:
1793        jmin = searchsorted(times, mint)
1794
1795    if maxt == None:
1796        jmax=len(times)
1797    else:
1798        jmax = searchsorted(times, maxt)
1799
1800    if minlat == None:
1801        kmin=0
1802    else:
1803        kmin = searchsorted(latitudes, minlat)
1804
1805    if maxlat == None:
1806        kmax = len(latitudes)
1807    else:
1808        kmax = searchsorted(latitudes, maxlat)
1809
1810    if minlon == None:
1811        lmin=0
1812    else:
1813        lmin = searchsorted(longitudes, minlon)
1814
1815    if maxlon == None:
1816        lmax = len(longitudes)
1817    else:
1818        lmax = searchsorted(longitudes, maxlon)
1819
1820
1821
1822    times = times[jmin:jmax]
1823    latitudes = latitudes[kmin:kmax]
1824    longitudes = longitudes[lmin:lmax]
1825
1826
1827    if verbose: print 'cropping'
1828    zname = 'ELEVATION'
1829
1830   
1831    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
1832    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
1833    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
1834    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
1835
1836#    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
1837#        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
1838#    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
1839#        from Numeric import asarray
1840#        elevations=elevations.tolist()
1841#        elevations.reverse()
1842#        elevations=asarray(elevations)
1843#    else:
1844#        from Numeric import asarray
1845#        elevations=elevations.tolist()
1846#        elevations.reverse()
1847#        elevations=asarray(elevations)
1848#        'print hmmm'
1849
1850
1851
1852    #Get missing values
1853    nan_ha = file_h.variables['HA'].missing_value[0]
1854    nan_ua = file_u.variables['UA'].missing_value[0]
1855    nan_va = file_v.variables['VA'].missing_value[0]
1856    if hasattr(file_e.variables[zname],'missing_value'):
1857        nan_e  = file_e.variables[zname].missing_value[0]
1858    else:
1859        nan_e = None
1860
1861    #Cleanup
1862    from Numeric import sometrue
1863
1864    missing = (amplitudes == nan_ha)
1865    if sometrue (missing):
1866        if fail_on_NaN:
1867            msg = 'NetCDFFile %s contains missing values'\
1868                  %(basename_in+'_ha.nc')
1869            raise msg
1870        else:
1871            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
1872
1873    missing = (uspeed == nan_ua)
1874    if sometrue (missing):
1875        if fail_on_NaN:
1876            msg = 'NetCDFFile %s contains missing values'\
1877                  %(basename_in+'_ua.nc')
1878            raise msg
1879        else:
1880            uspeed = uspeed*(missing==0) + missing*NaN_filler
1881
1882    missing = (vspeed == nan_va)
1883    if sometrue (missing):
1884        if fail_on_NaN:
1885            msg = 'NetCDFFile %s contains missing values'\
1886                  %(basename_in+'_va.nc')
1887            raise msg
1888        else:
1889            vspeed = vspeed*(missing==0) + missing*NaN_filler
1890
1891
1892    missing = (elevations == nan_e)
1893    if sometrue (missing):
1894        if fail_on_NaN:
1895            msg = 'NetCDFFile %s contains missing values'\
1896                  %(basename_in+'_e.nc')
1897            raise msg
1898        else:
1899            elevations = elevations*(missing==0) + missing*NaN_filler
1900
1901    #######
1902
1903
1904
1905    number_of_times = times.shape[0]
1906    number_of_latitudes = latitudes.shape[0]
1907    number_of_longitudes = longitudes.shape[0]
1908
1909    assert amplitudes.shape[0] == number_of_times
1910    assert amplitudes.shape[1] == number_of_latitudes
1911    assert amplitudes.shape[2] == number_of_longitudes
1912
1913    if verbose:
1914        print '------------------------------------------------'
1915        print 'Statistics:'
1916        print '  Extent (lat/lon):'
1917        print '    lat in [%f, %f], len(lat) == %d'\
1918              %(min(latitudes.flat), max(latitudes.flat),
1919                len(latitudes.flat))
1920        print '    lon in [%f, %f], len(lon) == %d'\
1921              %(min(longitudes.flat), max(longitudes.flat),
1922                len(longitudes.flat))
1923        print '    t in [%f, %f], len(t) == %d'\
1924              %(min(times.flat), max(times.flat), len(times.flat))
1925
1926        q = amplitudes.flat
1927        name = 'Amplitudes (ha) [cm]'
1928        print %s in [%f, %f]' %(name, min(q), max(q))
1929
1930        q = uspeed.flat
1931        name = 'Speeds (ua) [cm/s]'
1932        print %s in [%f, %f]' %(name, min(q), max(q))
1933
1934        q = vspeed.flat
1935        name = 'Speeds (va) [cm/s]'
1936        print %s in [%f, %f]' %(name, min(q), max(q))
1937
1938        q = elevations.flat
1939        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
1941
1942
1943    #print number_of_latitudes, number_of_longitudes
1944    number_of_points = number_of_latitudes*number_of_longitudes
1945    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
1946
1947
1948    file_h.close()
1949    file_u.close()
1950    file_v.close()
1951    file_e.close()
1952
1953
1954    # NetCDF file definition
1955    outfile = NetCDFFile(swwname, 'w')
1956
1957    #Create new file
1958    outfile.institution = 'Geoscience Australia'
1959    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
1960                          %(basename_in + '_ha.nc',
1961                            basename_in + '_ua.nc',
1962                            basename_in + '_va.nc',
1963                            basename_in + '_e.nc')
1964
1965
1966    #For sww compatibility
1967    outfile.smoothing = 'Yes'
1968    outfile.order = 1
1969
1970    #Start time in seconds since the epoch (midnight 1/1/1970)
1971    outfile.starttime = starttime = times[0]
1972    times = times - starttime  #Store relative times
1973
1974    # dimension definitions
1975    outfile.createDimension('number_of_volumes', number_of_volumes)
1976
1977    outfile.createDimension('number_of_vertices', 3)
1978    outfile.createDimension('number_of_points', number_of_points)
1979
1980
1981    #outfile.createDimension('number_of_timesteps', len(times))
1982    outfile.createDimension('number_of_timesteps', len(times))
1983
1984    # variable definitions
1985    outfile.createVariable('x', precision, ('number_of_points',))
1986    outfile.createVariable('y', precision, ('number_of_points',))
1987    outfile.createVariable('elevation', precision, ('number_of_points',))
1988
1989    #FIXME: Backwards compatibility
1990    outfile.createVariable('z', precision, ('number_of_points',))
1991    #################################
1992
1993    outfile.createVariable('volumes', Int, ('number_of_volumes',
1994                                            'number_of_vertices'))
1995
1996    outfile.createVariable('time', precision,
1997                           ('number_of_timesteps',))
1998
1999    outfile.createVariable('stage', precision,
2000                           ('number_of_timesteps',
2001                            'number_of_points'))
2002
2003    outfile.createVariable('xmomentum', precision,
2004                           ('number_of_timesteps',
2005                            'number_of_points'))
2006
2007    outfile.createVariable('ymomentum', precision,
2008                           ('number_of_timesteps',
2009                            'number_of_points'))
2010
2011
2012    #Store
2013    from coordinate_transforms.redfearn import redfearn
2014    x = zeros(number_of_points, Float)  #Easting
2015    y = zeros(number_of_points, Float)  #Northing
2016
2017
2018    if verbose: print 'Making triangular grid'
2019    #Check zone boundaries
2020    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2021
2022    vertices = {}
2023    i = 0
2024    for k, lat in enumerate(latitudes):       #Y direction
2025        for l, lon in enumerate(longitudes):  #X direction
2026
2027            vertices[l,k] = i
2028
2029            zone, easting, northing = redfearn(lat,lon)
2030
2031            msg = 'Zone boundary crossed at longitude =', lon
2032            #assert zone == refzone, msg
2033            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2034            x[i] = easting
2035            y[i] = northing
2036            i += 1
2037
2038
2039    #Construct 2 triangles per 'rectangular' element
2040    volumes = []
2041    for l in range(number_of_longitudes-1):    #X direction
2042        for k in range(number_of_latitudes-1): #Y direction
2043            v1 = vertices[l,k+1]
2044            v2 = vertices[l,k]
2045            v3 = vertices[l+1,k+1]
2046            v4 = vertices[l+1,k]
2047
2048            volumes.append([v1,v2,v3]) #Upper element
2049            volumes.append([v4,v3,v2]) #Lower element
2050
2051    volumes = array(volumes)
2052
2053    if origin == None:
2054        zone = refzone
2055        xllcorner = min(x)
2056        yllcorner = min(y)
2057    else:
2058        zone = origin[0]
2059        xllcorner = origin[1]
2060        yllcorner = origin[2]
2061
2062
2063    outfile.xllcorner = xllcorner
2064    outfile.yllcorner = yllcorner
2065    outfile.zone = zone
2066
2067
2068    if elevation is not None:
2069        z = elevation
2070    else:
2071        if inverted_bathymetry:
2072            z = -1*elevations
2073        else:
2074            z = elevations
2075        #FIXME: z should be obtained from MOST and passed in here
2076
2077    from Numeric import resize
2078    z = resize(z,outfile.variables['z'][:].shape)
2079    outfile.variables['x'][:] = x - xllcorner
2080    outfile.variables['y'][:] = y - yllcorner
2081    outfile.variables['z'][:] = z
2082    outfile.variables['elevation'][:] = z  #FIXME HACK
2083    outfile.variables['time'][:] = times   #Store time relative
2084    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2085
2086
2087
2088    #Time stepping
2089    stage = outfile.variables['stage']
2090    xmomentum = outfile.variables['xmomentum']
2091    ymomentum = outfile.variables['ymomentum']
2092
2093    if verbose: print 'Converting quantities'
2094    n = len(times)
2095    for j in range(n):
2096        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2097        i = 0
2098        for k in range(number_of_latitudes):      #Y direction
2099            for l in range(number_of_longitudes): #X direction
2100                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2101                stage[j,i] = w
2102                h = w - z[i]
2103                xmomentum[j,i] = uspeed[j,k,l]/100*h
2104                ymomentum[j,i] = vspeed[j,k,l]/100*h
2105                i += 1
2106
2107
2108    if verbose:
2109        x = outfile.variables['x'][:]
2110        y = outfile.variables['y'][:]
2111        print '------------------------------------------------'
2112        print 'Statistics of output file:'
2113        print '  Name: %s' %swwname
2114        print '  Reference:'
2115        print '    Lower left corner: [%f, %f]'\
2116              %(xllcorner, yllcorner)
2117        print '    Start time: %f' %starttime
2118        print '  Extent:'
2119        print '    x [m] in [%f, %f], len(x) == %d'\
2120              %(min(x.flat), max(x.flat), len(x.flat))
2121        print '    y [m] in [%f, %f], len(y) == %d'\
2122              %(min(y.flat), max(y.flat), len(y.flat))
2123        print '    t [s] in [%f, %f], len(t) == %d'\
2124              %(min(times), max(times), len(times))
2125        print '  Quantities [SI units]:'
2126        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2127            q = outfile.variables[name][:].flat
2128            print '    %s in [%f, %f]' %(name, min(q), max(q))
2129
2130
2131
2132
2133    outfile.close()
2134
2135
2136
2137
def timefile2netcdf(filename, quantity_names = None):
    """Convert a text file with time series to a NetCDF tms file.

    The text file format is assumed to be two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname with extensions .txt and .tms

    quantity_names: optional list of names for the value columns.
    Columns without a supplied name are called Attribute0, Attribute1, ...

    Raises (string exception, as per file convention) if the first field
    is not a date-time in config.time_format or if times do not increase
    monotonically.
    """

    import time, calendar
    from Numeric import zeros, Float, alltrue
    from config import time_format
    from util import ensure_numeric


    #Read first line to determine start time and number of attributes
    fid = open(filename + '.txt')
    line = fid.readline()
    fid.close()

    fields = line.split(',')
    #FIX: message previously left the %s placeholder unformatted
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg

    try:
        starttime = calendar.timegm(time.strptime(fields[0], time_format))
    except ValueError:
        msg = 'First field in file %s must be' %filename
        msg += ' date-time with format %s.\n' %time_format
        msg += 'I got %s instead.' %fields[0]
        raise msg


    #Split values to determine the number of attributes d
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg


    #Read times proper
    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    N = len(lines)
    d = len(q)

    T = zeros(N, Float)       #Time
    Q = zeros((N, d), Float)  #Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        realtime = calendar.timegm(time.strptime(fields[0], time_format))

        #Store time relative to the start time
        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonically ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg


    #Create NetCDF file
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(filename + '.tms', 'w')


    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'


    #Reference point
    #Start time in seconds since the epoch (midnight 1/1/1970)
    #FIXME: Use Georef
    fid.starttime = starttime


    # dimension definitions
    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', Float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            #quantity_names is None or shorter than the number of columns
            name = 'Attribute%d'%i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()
2258
2259
def extent_sww(file_name):
    """Return the spatial extent and stage range of an sww file.

    Input:
    file_name - the sww file

    Output:
    A list [min(x), max(x), min(y), max(y), min(stage), max(stage)]
    where x, y are the vertex coordinates and stage is the water level
    over all stored timesteps.
    """

    from Scientific.IO.NetCDF import NetCDFFile

    #Open existing NetCDF file for read
    fid = NetCDFFile(file_name, 'r')

    #Fetch the variables needed for the extent
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    stage = fid.variables['stage'][:]

    fid.close()

    return [min(x), max(x),
            min(y), max(y),
            min(stage.flat), max(stage.flat)]
2294
2295
def sww2domain(filename,boundary=None,t=None,\
               fail_if_NaN=True,NaN_filler=0\
               ,verbose = True,very_verbose = False):
    """
    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    Rebuild a shallow_water Domain from a stored sww file, with conserved
    quantities evaluated at time t by linear interpolation between the
    two bracketing stored timesteps.

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.

    fail_if_NaN: if a quantity contains the file's missing_value, raise
    (string exception) when True, else substitute NaN_filler.
    """
    NaN=9.969209968386869e+036
    #initialise NaN: default no-data marker; may be overridden per
    #variable below by its missing_value attribute.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
    time = fid.variables['time']       #Timesteps
    if t is None:
        t = time[-1]
    time_interp = get_time_interp(time,t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]             #x-coordinates of vertices
    y = fid.variables['y'][:]             #y-coordinates of vertices
    elevation = fid.variables['elevation']     #Elevation
    stage = fid.variables['stage']     #Water level
    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    coordinates=transpose(asarray([x.tolist(),y.tolist()]))

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    #Classify variables: anything with a time dimension is a conserved
    #quantity (interpolated to t now); the rest are static quantities.
    if verbose: print '    getting quantities'
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity]=\
                  interpolated_quantity(fid.variables[quantity][:],time_interp)
        else: other_quantities.append(quantity)

    #Coordinates and connectivity are not quantities
    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
#    From domain.Domain:
#    domain = Domain(coordinates, volumes,\
#                    conserved_quantities = conserved_quantities,\
#                    other_quantities = other_quantities,zone=zone,\
#                    xllcorner=xllcorner, yllcorner=yllcorner)

#   From shallow_water.Domain:
    coordinates=coordinates.tolist()
    volumes=volumes.tolist()
    #FIXME:should this be in mesh?(peter row)
    #Smoothed files share vertices, so only non-smoothed (unique) meshes
    #need duplicate vertices weeded out.
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)


    domain = Domain(coordinates, volumes, boundary)

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    #Model time restarts at 0 relative to the new absolute start time
    domain.starttime=float(starttime)+float(t)
    domain.time=0.0

    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                #Replace no-data entries with NaN_filler
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Reshape flat vertex values back to (ntriangles, 3)
            X = resize(X,(len(X)/3,3))
        domain.set_quantity(quantity,X)
#
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                #Replace no-data entries with NaN_filler
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Reshape flat vertex values back to (ntriangles, 3)
            X = resize(X,(X.shape[0]/3,3))
        domain.set_quantity(quantity,X)
    fid.close()
    return domain
2438
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a stored quantity in time.

    time_interp is a pair (index, ratio) as produced by get_time_interp.
    Returns the linear blend of saved_quantity[index] and
    saved_quantity[index+1] weighted by ratio; if ratio is not positive
    the value at index is returned unchanged.
    """
    index, ratio = time_interp

    if ratio <= 0:
        #Exact timestep hit - no blending required
        return saved_quantity[index]

    #Linear interpolation between the two bracketing timesteps
    return (1 - ratio)*saved_quantity[index] + ratio*saved_quantity[index + 1]
2450
def get_time_interp(time, t=None):
    """Find the (index, ratio) pair for linear time interpolation.

    time: monotonically increasing sequence of stored timesteps.
    t: target time; defaults to the last stored timestep.

    Returns (index, ratio) such that t lies between time[index] and
    time[index+1] with blending weight ratio (0 on an exact hit).
    Raises (string exception, per file convention) if t falls outside
    the stored interval.  Borrowed from previous pyvolution code.
    """
    if t is None:
        #Default to the last stored timestep - no interpolation needed
        return (-1, 0.)

    T = time
    tau = t
    index = 0

    #Out-of-range check
    msg = 'Time interval derived from file %s [%s:%s]'\
        %('FIXMEfilename', T[0], T[-1])
    msg += ' does not match model time: %s' %tau
    if tau < time[0]: raise msg
    if tau > time[-1]: raise msg

    #Walk forward past tau, then back to the bracketing index
    while tau > time[index]:
        index += 1
    while tau < time[index]:
        index -= 1

    if tau == time[index]:
        #Protect against case where tau == time[-1] (last time)
        # - also works in general when tau == time[i]
        ratio = 0
    else:
        #t is now strictly between index and index+1
        ratio = (tau - time[index])/(time[index+1] - time[index])

    return (index, ratio)
2477
2478
def weed(coordinates, volumes, boundary = None):
    """Remove duplicated points from coordinates, remapping volumes and
    (optionally) boundary onto the weeded point indices.

    coordinates: list (or Numeric array) of [x, y] points
    volumes: list (or Numeric array) of triangles as index triples;
             negative indices are left untouched
    boundary: optional dictionary {(index0, index1): label}

    Returns (coordinates, volumes, boundary); boundary is returned
    unchanged (None) if not supplied.
    """
    #FIX: originally tested type(...) == 'array', which compares a type
    #object to a string and is always False, so arrays were never
    #converted.  Detect array-like inputs via their tolist method instead.
    if hasattr(coordinates, 'tolist'):
        coordinates = coordinates.tolist()
    if hasattr(volumes, 'tolist'):
        volumes = volumes.tolist()

    #Map each original index to its point, remembering the first index
    #at which each distinct point occurred
    unique = False
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point in point_dict:
            unique = True
            same_point[i] = point
            #to change all point i references to point j
        else:
            point_dict[point] = i
            same_point[i] = point

    #Rebuild the coordinate list from the distinct points and renumber
    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    #Remap volume vertices onto the renumbered points
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    new_boundary = {}
    if not boundary is None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)

            if (point0, point1) in new_boundary:
                new_boundary[(point0, point1)] = new_boundary[(point0, point1)]#\
                                              #+','+label

            elif (point1, point0) in new_boundary:
                new_boundary[(point1, point0)] = new_boundary[(point1, point0)]#\
                                              #+','+label
            else: new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
2534
2535
2536
2537
2538
2539
2540
2541
2542
2543
2544###############################################
2545#OBSOLETE STUFF
2546#Native checkpoint format.
2547#Information needed to recreate a state is preserved
2548#FIXME: Rethink and maybe use netcdf format
def cpt_variable_writer(filename, t, v0, v1, v2):
    """Store all conserved quantities to file.

    Each output row holds the N values of v0, then v1, then v2 for one
    of the M elements, each formatted as '%.16e '.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        #One row per element: v0 values, then v1, then v2
        for v in (v0, v1, v2):
            for j in range(N):
                fid.write('%.16e ' %v[i,j])

        fid.write('\n')
    fid.close()
2569
2570
def cpt_variable_reader(filename, t, v0, v1, v2):
    """Read all conserved quantities back from an ascii checkpoint file.

    Inverse of cpt_variable_writer: each line holds the N values of v0,
    then N of v1, then N of v2 for one element.  Results are stored in
    place into v0, v1 and v2 (each of shape M x N).
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Reading from %s' %FN

    fid = open(FN)


    for i in range(M):
        values = fid.readline().split() #Get one line

        for j in range(N):
            #FIX: offsets were hard-coded as values[3+j] and values[6+j],
            #which only matches the writer's layout when N == 3.  Use
            #multiples of N so the reader mirrors cpt_variable_writer.
            v0[i,j] = float(values[j])
            v1[i,j] = float(values[N+j])
            v2[i,j] = float(values[2*N+j])

    fid.close()
2592
def cpt_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z,z,z coordinates of triangles constituting the bed
    elevation.
    FIXME: Not in use pt

    NOTE(review): the debug 'print X0' and 'import sys; sys.exit()'
    below terminate the interpreter, so everything after them is
    unreachable - this function as written never produces a file.
    Left untouched because the FIXME suggests it was deliberately
    disabled; confirm before re-enabling.
    """

    M, N = v0.shape


    #Debug leftover: dumps X0 and exits the whole process
    print X0
    import sys; sys.exit()
    #--- unreachable from here on ---
    FN = create_filename(filename, 'cpt', M)
    print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        for j in range(2):
            fid.write('%.16e ' %X0[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v0[i,j])       #z,z,z,

        for j in range(2):
            fid.write('%.16e ' %X1[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v1[i,j])

        for j in range(2):
            fid.write('%.16e ' %X2[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v2[i,j])

        fid.write('\n')
    fid.close()
2626
2627
2628
2629#Function for storing out to e.g. visualisation
2630#FIXME: Do we want this?
2631#FIXME: Not done yet for this version
def dat_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z coordinates of triangles constituting the bed elevation.

    Each output line holds the three vertices of one triangle as x y z
    triples ('%f ' formatted), taking z from the first column of
    v0/v1/v2 respectively.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        #Write (x, y, z) for each of the triangle's three vertices
        for X, v in ((X0, v0), (X1, v1), (X2, v2)):
            for j in range(2):
                fid.write('%f ' %X[i,j])   #x, y
            fid.write('%f ' %v[i,0])       #z

        fid.write('\n')
    fid.close()
2657
2658
2659
def dat_variable_writer(filename, t, v0, v1, v2):
    """Store water height to file.

    Writes one line per element containing the first column of v0, v1
    and v2 formatted as '%.4f '.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M, t)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        #First column of each conserved quantity for element i
        for v in (v0, v1, v2):
            fid.write('%.4f ' %v[i,0])

        fid.write('\n')
    fid.close()
2677
2678
def read_sww(filename):
    """Read sww Net CDF file containing Shallow Water Wave simulation

    The integer array volumes is of shape Nx3 where N is the number of
    triangles in the mesh.

    Each entry in volumes is an index into the x,y arrays (the location).

    Quantities stage, elevation, xmomentum and ymomentum are all in arrays of dimensions
    number_of_timesteps, number_of_points.

    The momentum is not always stored.

    NOTE(review): as written this function returns nothing - it only
    binds the NetCDF variable objects to locals - and it never closes
    fid.  It appears obsolete/incomplete; confirm before relying on it.
    """
    from Scientific.IO.NetCDF import NetCDFFile
    print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
#latitude, longitude
    # Get the variables as Numeric arrays
    x = fid.variables['x']             #x-coordinates of vertices
    y = fid.variables['y']             #y-coordinates of vertices
    z = fid.variables['elevation']     #Elevation
    time = fid.variables['time']       #Timesteps
    stage = fid.variables['stage']     #Water level
    #xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    #ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    volumes = fid.variables['volumes'] #Connectivity
2707
2708
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.

    basename_in: rootname of the input .dem file
    stencil: 2D weight array applied to each sampled window
    cellsize_new: target cellsize; assumed to be an integer multiple of
                  the input cellsize (ratio is truncated via int())
    basename_out: optional output rootname; defaults to
                  basename_in + '_' + repr(cellsize_new)
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata (stored as length-1 arrays, hence the [0] indexing)
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out == None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    #NOTE: relies on Python 2 integer division throughout
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                           'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    #Lower-left corner shifts by the stencil's half-width in grid units
    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data: walk the new grid row by row, sampling a stencil-sized
    #window from the original grid every cellsize_ratio cells
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev =  zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        #Write the decimated row in one slab
        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
2835
2836
2837   
2838def sww2asc_obsolete(basename_in, basename_out = None,
2839            quantity = None,
2840            timestep = None,
2841            reduction = None,
2842            cellsize = 10,
2843            verbose = False,
2844            origin = None):
2845    """Read SWW file and convert to Digitial Elevation model format (.asc)
2846
2847    Example:
2848
2849    ncols         3121
2850    nrows         1800
2851    xllcorner     722000
2852    yllcorner     5893000
2853    cellsize      25
2854    NODATA_value  -9999
2855    138.3698 137.4194 136.5062 135.5558 ..........
2856
2857    Also write accompanying file with same basename_in but extension .prj
2858    used to fix the UTM zone, datum, false northings and eastings.
2859
2860    The prj format is assumed to be as
2861
2862    Projection    UTM
2863    Zone          56
2864    Datum         WGS84
2865    Zunits        NO
2866    Units         METERS
2867    Spheroid      WGS84
2868    Xshift        0.0000000000
2869    Yshift        10000000.0000000000
2870    Parameters
2871
2872
2873    if quantity is given, out values from quantity otherwise default to
2874    elevation
2875
2876    if timestep (an index) is given, output quantity at that timestep
2877
2878    if reduction is given use that to reduce quantity over all timesteps.
2879
2880    """
2881    from Numeric import array, Float, concatenate, NewAxis, zeros,\
2882         sometrue
2883    from utilities.polygon import inside_polygon
2884
2885    #FIXME: Should be variable
2886    datum = 'WGS84'
2887    false_easting = 500000
2888    false_northing = 10000000
2889
2890    if quantity is None:
2891        quantity = 'elevation'
2892
2893    if reduction is None:
2894        reduction = max
2895
2896    if basename_out is None:
2897        basename_out = basename_in + '_%s' %quantity
2898
2899    swwfile = basename_in + '.sww'
2900    ascfile = basename_out + '.asc'
2901    prjfile = basename_out + '.prj'
2902
2903
2904    if verbose: print 'Reading from %s' %swwfile
2905    #Read sww file
2906    from Scientific.IO.NetCDF import NetCDFFile
2907    fid = NetCDFFile(swwfile)
2908
2909    #Get extent and reference
2910    x = fid.variables['x'][:]
2911    y = fid.variables['y'][:]
2912    volumes = fid.variables['volumes'][:]
2913
2914    ymin = min(y); ymax = max(y)
2915    xmin = min(x); xmax = max(x)
2916
2917    number_of_timesteps = fid.dimensions['number_of_timesteps']
2918    number_of_points = fid.dimensions['number_of_points']
2919    if origin is None:
2920
2921        #Get geo_reference
2922        #sww files don't have to have a geo_ref
2923        try:
2924            geo_reference = Geo_reference(NetCDFObject=fid)
2925        except AttributeError, e:
2926            geo_reference = Geo_reference() #Default georef object
2927
2928        xllcorner = geo_reference.get_xllcorner()
2929        yllcorner = geo_reference.get_yllcorner()
2930        zone = geo_reference.get_zone()
2931    else:
2932        zone = origin[0]
2933        xllcorner = origin[1]
2934        yllcorner = origin[2]
2935
2936
2937    #Get quantity and reduce if applicable
2938    if verbose: print 'Reading quantity %s' %quantity
2939
2940    if quantity.lower() == 'depth':
2941        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
2942    else:
2943        q = fid.variables[quantity][:]
2944
2945
2946    if len(q.shape) == 2:
2947        if verbose: print 'Reducing quantity %s' %quantity
2948        q_reduced = zeros( number_of_points, Float )
2949
2950        for k in range(number_of_points):
2951            q_reduced[k] = reduction( q[:,k] )
2952
2953        q = q_reduced
2954
2955    #Now q has dimension: number_of_points
2956
2957    #Create grid and update xll/yll corner and x,y
2958    if verbose: print 'Creating grid'
2959    ncols = int((xmax-xmin)/cellsize)+1
2960    nrows = int((ymax-ymin)/cellsize)+1
2961
2962    newxllcorner = xmin+xllcorner
2963    newyllcorner = ymin+yllcorner
2964
2965    x = x+xllcorner-newxllcorner
2966    y = y+yllcorner-newyllcorner
2967
2968    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2969    assert len(vertex_points.shape) == 2
2970
2971
2972    from Numeric import zeros, Float
2973    grid_points = zeros ( (ncols*nrows, 2), Float )
2974
2975
2976    for i in xrange(nrows):
2977        yg = i*cellsize
2978        for j in xrange(ncols):
2979            xg = j*cellsize
2980            k = i*ncols + j
2981
2982            grid_points[k,0] = xg
2983            grid_points[k,1] = yg
2984
2985    #Interpolate
2986    from least_squares import Interpolation
2987   
2988
2989    #FIXME: This should be done with precrop = True, otherwise it'll
2990    #take forever. With expand_search  set to False, some grid points might
2991    #miss out....
2992
2993    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
2994                           precrop = False, expand_search = False,
2995                           verbose = verbose)
2996
2997    #Interpolate using quantity values
2998    if verbose: print 'Interpolating'
2999    grid_values = interp.interpolate(q).flat
3000
3001    #Write
3002    #Write prj file
3003    if verbose: print 'Writing %s' %prjfile
3004    prjid = open(prjfile, 'w')
3005    prjid.write('Projection    %s\n' %'UTM')
3006    prjid.write('Zone          %d\n' %zone)
3007    prjid.write('Datum         %s\n' %datum)
3008    prjid.write('Zunits        NO\n')
3009    prjid.write('Units         METERS\n')
3010    prjid.write('Spheroid      %s\n' %datum)
3011    prjid.write('Xshift        %d\n' %false_easting)
3012    prjid.write('Yshift        %d\n' %false_northing)
3013    prjid.write('Parameters\n')
3014    prjid.close()
3015
3016
3017   
3018    if verbose: print 'Writing %s' %ascfile
3019    NODATA_value = -9999
3020 
3021    ascid = open(ascfile, 'w')
3022
3023    ascid.write('ncols         %d\n' %ncols)
3024    ascid.write('nrows         %d\n' %nrows)
3025    ascid.write('xllcorner     %d\n' %newxllcorner)
3026    ascid.write('yllcorner     %d\n' %newyllcorner)
3027    ascid.write('cellsize      %f\n' %cellsize)
3028    ascid.write('NODATA_value  %d\n' %NODATA_value)
3029
3030
3031    #Get bounding polygon from mesh
3032    P = interp.mesh.get_boundary_polygon()
3033    inside_indices = inside_polygon(grid_points, P)
3034
3035    for i in range(nrows):
3036        if verbose and i%((nrows+10)/10)==0:
3037            print 'Doing row %d of %d' %(i, nrows)
3038
3039        for j in range(ncols):
3040            index = (nrows-i-1)*ncols+j
3041
3042            if sometrue(inside_indices == index):
3043                ascid.write('%f ' %grid_values[index])
3044            else:
3045                ascid.write('%d ' %NODATA_value)
3046
3047        ascid.write('\n')
3048
3049    #Close
3050    ascid.close()
3051    fid.close()
3052
3053def sww2ers_obsolete(basename_in, basename_out = None,
3054            quantity = None,
3055            timestep = None,
3056            reduction = None,
3057            cellsize = 10,
3058            verbose = False,
3059            origin = None,
3060            datum = 'WGS84'):
3061   
3062    """Read SWW file and convert to Digitial Elevation model format (.asc)
3063
3064    Example:
3065
3066    ncols         3121
3067    nrows         1800
3068    xllcorner     722000
3069    yllcorner     5893000
3070    cellsize      25
3071    NODATA_value  -9999
3072    138.3698 137.4194 136.5062 135.5558 ..........
3073
3074    Also write accompanying file with same basename_in but extension .prj
3075    used to fix the UTM zone, datum, false northings and eastings.
3076
3077    The prj format is assumed to be as
3078
3079    Projection    UTM
3080    Zone          56
3081    Datum         WGS84
3082    Zunits        NO
3083    Units         METERS
3084    Spheroid      WGS84
3085    Xshift        0.0000000000
3086    Yshift        10000000.0000000000
3087    Parameters
3088
3089
3090    if quantity is given, out values from quantity otherwise default to
3091    elevation
3092
3093    if timestep (an index) is given, output quantity at that timestep
3094
3095    if reduction is given use that to reduce quantity over all timesteps.
3096
3097    """
3098    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
3099    import ermapper_grids
3100    from utilities.polygon import inside_polygon   
3101
3102    header = {}
3103    false_easting = 500000
3104    false_northing = 10000000
3105    NODATA_value = 0
3106
3107    if quantity is None:
3108        quantity = 'elevation'
3109
3110    if reduction is None:
3111        reduction = max
3112
3113    if basename_out is None:
3114        basename_out = basename_in + '_%s' %quantity
3115 
3116    swwfile = basename_in + '.sww'
3117    # Note the use of a .ers extension is optional (write_ermapper_grid will
3118    # deal with either option
3119    ersfile = basename_out
3120##    prjfile = basename_out + '.prj'
3121
3122
3123    if verbose: print 'Reading from %s' %swwfile
3124    #Read sww file
3125    from Scientific.IO.NetCDF import NetCDFFile
3126    fid = NetCDFFile(swwfile)
3127
3128    #Get extent and reference
3129    x = fid.variables['x'][:]
3130    y = fid.variables['y'][:]
3131    volumes = fid.variables['volumes'][:]
3132
3133    ymin = min(y); ymax = max(y)
3134    xmin = min(x); xmax = max(x)
3135
3136    number_of_timesteps = fid.dimensions['number_of_timesteps']
3137    number_of_points = fid.dimensions['number_of_points']
3138    if origin is None:
3139
3140        #Get geo_reference
3141        #sww files don't have to have a geo_ref
3142        try:
3143            geo_reference = Geo_reference(NetCDFObject=fid)
3144        except AttributeError, e:
3145            geo_reference = Geo_reference() #Default georef object
3146
3147        xllcorner = geo_reference.get_xllcorner()
3148        yllcorner = geo_reference.get_yllcorner()
3149        zone = geo_reference.get_zone()
3150    else:
3151        zone = origin[0]
3152        xllcorner = origin[1]
3153        yllcorner = origin[2]
3154
3155
3156    #Get quantity and reduce if applicable
3157    if verbose: print 'Reading quantity %s' %quantity
3158
3159    if quantity.lower() == 'depth':
3160        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
3161    else:
3162        q = fid.variables[quantity][:]
3163
3164
3165    if len(q.shape) == 2:
3166        if verbose: print 'Reducing quantity %s' %quantity
3167        q_reduced = zeros( number_of_points, Float )
3168
3169        for k in range(number_of_points):
3170            q_reduced[k] = reduction( q[:,k] )
3171
3172        q = q_reduced
3173
3174    #Now q has dimension: number_of_points
3175
3176    #Create grid and update xll/yll corner and x,y
3177    if verbose: print 'Creating grid'
3178    ncols = int((xmax-xmin)/cellsize)+1
3179    nrows = int((ymax-ymin)/cellsize)+1
3180
3181    newxllcorner = xmin+xllcorner
3182    newyllcorner = ymin+yllcorner
3183
3184    x = x+xllcorner-newxllcorner
3185    y = y+yllcorner-newyllcorner
3186
3187    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
3188    assert len(vertex_points.shape) == 2
3189
3190
3191    from Numeric import zeros, Float
3192    grid_points = zeros ( (ncols*nrows, 2), Float )
3193
3194
3195    for i in xrange(nrows):
3196        yg = (nrows-i)*cellsize # this will flip the order of the y values
3197        for j in xrange(ncols):
3198            xg = j*cellsize
3199            k = i*ncols + j
3200
3201            grid_points[k,0] = xg
3202            grid_points[k,1] = yg
3203
3204    #Interpolate
3205    from least_squares import Interpolation
3206   
3207
3208    #FIXME: This should be done with precrop = True (?), otherwise it'll
3209    #take forever. With expand_search  set to False, some grid points might
3210    #miss out....
3211
3212    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
3213                           precrop = False, expand_search = False,
3214                           verbose = verbose)
3215
3216    #Interpolate using quantity values
3217    if verbose: print 'Interpolating'
3218    grid_values = interp.interpolate(q).flat
3219    grid_values = reshape(grid_values,(nrows, ncols))
3220
3221
3222    # setup header information
3223    header['datum'] = '"' + datum + '"'
3224    # FIXME The use of hardwired UTM and zone number needs to be made optional
3225    # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
3226    header['projection'] = '"UTM-' + str(zone) + '"'
3227    header['coordinatetype'] = 'EN'
3228    if header['coordinatetype'] == 'LL':
3229        header['longitude'] = str(newxllcorner)
3230        header['latitude'] = str(newyllcorner)
3231    elif header['coordinatetype'] == 'EN':
3232        header['eastings'] = str(newxllcorner)
3233        header['northings'] = str(newyllcorner)
3234    header['nullcellvalue'] = str(NODATA_value)
3235    header['xdimension'] = str(cellsize)
3236    header['ydimension'] = str(cellsize)
3237    header['value'] = '"' + quantity + '"'
3238
3239
3240    #Write
3241    if verbose: print 'Writing %s' %ersfile
3242    ermapper_grids.write_ermapper_grid(ersfile,grid_values, header)
3243
3244    fid.close()   
3245
3246##    ascid = open(ascfile, 'w')
3247##
3248##    ascid.write('ncols         %d\n' %ncols)
3249##    ascid.write('nrows         %d\n' %nrows)
3250##    ascid.write('xllcorner     %d\n' %newxllcorner)
3251##    ascid.write('yllcorner     %d\n' %newyllcorner)
3252##    ascid.write('cellsize      %f\n' %cellsize)
3253##    ascid.write('NODATA_value  %d\n' %NODATA_value)
3254##
3255##
3256##    #Get bounding polygon from mesh
3257##    P = interp.mesh.get_boundary_polygon()
3258##    inside_indices = inside_polygon(grid_points, P)
3259##
3260##    for i in range(nrows):
3261##        if verbose and i%((nrows+10)/10)==0:
3262##            print 'Doing row %d of %d' %(i, nrows)
3263##
3264##        for j in range(ncols):
3265##            index = (nrows-i-1)*ncols+j
3266##
3267##            if sometrue(inside_indices == index):
3268##                ascid.write('%f ' %grid_values[index])
3269##            else:
3270##                ascid.write('%d ' %NODATA_value)
3271##
3272##        ascid.write('\n')
3273##
3274##    #Close
3275##    ascid.close()
3276##    fid.close()
3277
Note: See TracBrowser for help on using the repository browser.