source: inundation/pyvolution/data_manager.py @ 1927

Last change on this file since 1927 was 1927, checked in by ole, 19 years ago

Improved expression parser for apply_expression_to_dictionary()

File size: 94.0 KB
Line 
1"""datamanager.py - input output for AnuGA
2
3
This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
.geo: Houdini's ASCII geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
NC -> SWW: Conversion of MOST boundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
TSH + Boundary SWW -> SWW: Simulation using pyvolution
48
49"""
50
51from Numeric import concatenate
52
53from coordinate_transforms.geo_reference import Geo_reference
54
def make_filename(s):
    """Transform argument string into a suitable filename
    """
    # Strip surrounding whitespace, then apply a fixed sequence of
    # substitutions: spaces become underscores, parentheses vanish and
    # any double underscore is collapsed (single pass).
    substitutions = [(' ', '_'), ('(', ''), (')', ''), ('__', '_')]

    result = s.strip()
    for old, new in substitutions:
        result = result.replace(old, new)

    return result
66
67
68def check_dir(path, verbose=None):
69    """Check that specified path exists.
70    If path does not exist it will be created if possible
71
72    USAGE:
73       checkdir(path, verbose):
74
75    ARGUMENTS:
76        path -- Directory
77        verbose -- Flag verbose output (default: None)
78
79    RETURN VALUE:
80        Verified path including trailing separator
81
82    """
83
84    import os, sys
85    import os.path
86
87    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
88        unix = 0
89    else:
90        unix = 1
91
92
93    if path[-1] != os.sep:
94        path = path + os.sep  # Add separator for directories
95
96    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
97    if not (os.access(path,os.R_OK and os.W_OK) or path == ''):
98        try:
99            exitcode=os.mkdir(path)
100
101            # Change access rights if possible
102            #
103            if unix:
104                exitcode=os.system('chmod 775 '+path)
105            else:
106                pass  # FIXME: What about acces rights under Windows?
107
108            if verbose: print 'MESSAGE: Directory', path, 'created.'
109
110        except:
111            print 'WARNING: Directory', path, 'could not be created.'
112            if unix:
113                path = '/tmp/'
114            else:
115                path = 'C:'
116
117            print 'Using directory %s instead' %path
118
119    return(path)
120
121
122
123def del_dir(path):
124    """Recursively delete directory path and all its contents
125    """
126
127    import os
128
129    if os.path.isdir(path):
130        for file in os.listdir(path):
131            X = os.path.join(path, file)
132
133
134            if os.path.isdir(X) and not os.path.islink(X):
135                del_dir(X)
136            else:
137                try:
138                    os.remove(X)
139                except:
140                    print "Could not remove file %s" %X
141
142        os.rmdir(path)
143
144
145
def create_filename(datadir, filename, format, size=None, time=None):
    """Build a data file name inside datadir.

    Optional size and time stamps are appended before the format
    extension, e.g. name_size10_time1.00.sww
    """

    import os

    #check_dir guarantees the directory exists and ends with a separator
    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size
    if time is not None:
        FN += '_time%.2f' %time

    return FN + '.' + format
161
162
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob
    import os

    dir = check_dir(datadir)

    #Bug fixes:
    # * check_dir already returns a trailing separator, so adding
    #   another os.sep produced doubled separators in the pattern.
    # * create_filename writes '_size%d' (no '='), so the old pattern
    #   '_size=%d' never matched any file generated by this module.
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)
176
177
178
179#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats.

    Base class holding the output filename and basic book-keeping
    shared by the concrete writers (sww, cpt, xya).
    """

    def __init__(self, domain, extension, mode = 'w'):
        """Bind writer to domain and derive the output filename.

        mode must be one of 'r' (read), 'w' (write) or 'a' (append).
        """
        msg = '''Mode %s must be either:''' %mode
        msg += '''   'w' (write)'''
        msg += '''   'r' (read)'''
        msg += '''   'a' (append)'''
        assert mode in ['r', 'w', 'a'], msg

        #Derive output filename from the domain's data directory and name
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.timestep = 0                     #Next timestep index
        self.number_of_volumes = len(domain)  #Number of triangles
        self.domain = domain

        #FIXME: Should we have a general set_precision function?
206
207
208
209#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        """Create sww file and (in 'w' mode) define its NetCDF structure.

        domain    -- model domain; supplies datadir, name and metadata
        mode      -- 'r', 'w' or 'a' as for Data_format
        max_size  -- split output into a new file once it would exceed
                     this many bytes (default ~2GB)
        recursion -- internal: depth of the file-splitting recursion
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        #A max_size attribute on the domain overrides the argument
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #With smoothing, vertices are shared between triangles;
            #without it every triangle stores its own three vertices
            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility ('z' duplicates 'elevation')
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            #Per-timestep variables: time plus one row per conserved
            #quantity at every point
            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility - mirror elevation into 'z'
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file

        names may be a single quantity name or a list/tuple of names
        ('stage', 'xmomentum', 'ymomentum'). If appending would push
        the file past max_size, a new sww file is started instead.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        minimum_allowed_depth = 0.001
        #minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        #Open may fail transiently, e.g. while a viewer reads the file -
        #retry up to 10 times with one second sleeps
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            #FIXME(review): string exception - deprecated; consider IOError
            raise msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase =  file_size/i
        #The allowed size doubles per recursion level so split files do
        #not immediately trigger another split themselves
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break swollens reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            #Accept a single name or a sequence of names
            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #Clip stage to the bed wherever the depth falls
                    #below the minimum allowed depth (dry cells)
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()
479
480
481
482#Class for handling checkpoints data
483class Data_format_cpt(Data_format):
484    """Interface to native NetCDF format (.cpt)
485    """
486
487
488    def __init__(self, domain, mode = 'w'):
489        from Scientific.IO.NetCDF import NetCDFFile
490        from Numeric import Int, Float, Float
491
492        self.precision = Float #Use full precision
493
494        Data_format.__init__(self, domain, 'sww', mode)
495
496
497        # NetCDF file definition
498        fid = NetCDFFile(self.filename, mode)
499
500        if mode == 'w':
501            #Create new file
502            fid.institution = 'Geoscience Australia'
503            fid.description = 'Checkpoint data'
504            #fid.smooth = domain.smooth
505            fid.order = domain.default_order
506
507            # dimension definitions
508            fid.createDimension('number_of_volumes', self.number_of_volumes)
509            fid.createDimension('number_of_vertices', 3)
510
511            #Store info at all vertices (no smoothing)
512            fid.createDimension('number_of_points', 3*self.number_of_volumes)
513            fid.createDimension('number_of_timesteps', None) #extensible
514
515            # variable definitions
516
517            #Mesh
518            fid.createVariable('x', self.precision, ('number_of_points',))
519            fid.createVariable('y', self.precision, ('number_of_points',))
520
521
522            fid.createVariable('volumes', Int, ('number_of_volumes',
523                                                'number_of_vertices'))
524
525            fid.createVariable('time', self.precision,
526                               ('number_of_timesteps',))
527
528            #Allocate space for all quantities
529            for name in domain.quantities.keys():
530                fid.createVariable(name, self.precision,
531                                   ('number_of_timesteps',
532                                    'number_of_points'))
533
534        #Close
535        fid.close()
536
537
538    def store_checkpoint(self):
539        """
540        Write x,y coordinates of triangles.
541        Write connectivity (
542        constituting
543        the bed elevation.
544        """
545
546        from Scientific.IO.NetCDF import NetCDFFile
547
548        from Numeric import concatenate
549
550        domain = self.domain
551
552        #Get NetCDF
553        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
554
555        # Get the variables
556        x = fid.variables['x']
557        y = fid.variables['y']
558
559        volumes = fid.variables['volumes']
560
561        # Get X, Y and bed elevation Z
562        Q = domain.quantities['elevation']
563        X,Y,Z,V = Q.get_vertex_values(xy=True,
564                                      precision = self.precision)
565
566
567
568        x[:] = X.astype(self.precision)
569        y[:] = Y.astype(self.precision)
570        z[:] = Z.astype(self.precision)
571
572        volumes[:] = V
573
574        #Close
575        fid.close()
576
577
578    def store_timestep(self, name):
579        """Store time and named quantity to file
580        """
581        from Scientific.IO.NetCDF import NetCDFFile
582        from time import sleep
583
584        #Get NetCDF
585        retries = 0
586        file_open = False
587        while not file_open and retries < 10:
588            try:
589                fid = NetCDFFile(self.filename, 'a') #Open existing file
590            except IOError:
591                #This could happen if someone was reading the file.
592                #In that case, wait a while and try again
593                msg = 'Warning (store_timestep): File %s could not be opened'\
594                          %self.filename
595                msg += ' - trying again'
596                print msg
597                retries += 1
598                sleep(1)
599            else:
600                file_open = True
601
602            if not file_open:
603                msg = 'File %s could not be opened for append' %self.filename
604            raise msg
605
606
607        domain = self.domain
608
609        # Get the variables
610        time = fid.variables['time']
611        stage = fid.variables['stage']
612        i = len(time)
613
614        #Store stage
615        time[i] = self.domain.time
616
617        # Get quantity
618        Q = domain.quantities[name]
619        A,V = Q.get_vertex_values(xy=False,
620                                  precision = self.precision)
621
622        stage[i,:] = A.astype(self.precision)
623
624        #Flush and close
625        fid.sync()
626        fid.close()
627
628
629
630
631
632#Function for storing xya output
633#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface for storing model output in the ASCII xya point format.
    """


    def __init__(self, domain, mode = 'w'):
        """Bind writer to domain using the 'xya' extension.

        NOTE: NetCDFFile, Int and Float are imported but unused here;
        only Float32 (the storage precision) is needed.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        #Number of distinct vertices: shared when smoothing, otherwise
        #three per triangle
        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        #Header line: point count, attribute count and column comment
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        #NOTE: map(None, ...) is the Python 2 idiom for zipping several
        #sequences, padding with None if lengths differ
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file

        Not implemented for the xya format - this is a no-op.
        """
        pass
703
704
705#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format).

    Vectors are assumed to have dimension (M,3) where M corresponds to
    the number of elements; triangles are assumed to be disconnected.
    The three numbers in each vector correspond to three vertices,
    e.g. the x coordinate of vertex 1 of element i is in x[i,1].
    """

    import os.path

    #Append the .obj extension unless it is already present
    root, ext = os.path.splitext(filename)
    if ext != '.obj':
        FN = filename + '.obj'
    else:
        FN = filename

    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    #One 'v' line per vertex; elements are stored consecutively
    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    #One 'f' line per element, referencing its three 1-based vertices
    for base in range(0, M*N, N):
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()
743
744
745#########################################################
746#Conversion routines
747########################################################
748
749def sww2obj(basefilename, size):
750    """Convert netcdf based data output to obj
751    """
752    from Scientific.IO.NetCDF import NetCDFFile
753
754    from Numeric import Float, zeros
755
756    #Get NetCDF
757    FN = create_filename('.', basefilename, 'sww', size)
758    print 'Reading from ', FN
759    fid = NetCDFFile(FN, 'r')  #Open existing file for read
760
761
762    # Get the variables
763    x = fid.variables['x']
764    y = fid.variables['y']
765    z = fid.variables['elevation']
766    time = fid.variables['time']
767    stage = fid.variables['stage']
768
769    M = size  #Number of lines
770    xx = zeros((M,3), Float)
771    yy = zeros((M,3), Float)
772    zz = zeros((M,3), Float)
773
774    for i in range(M):
775        for j in range(3):
776            xx[i,j] = x[i+j*M]
777            yy[i,j] = y[i+j*M]
778            zz[i,j] = z[i+j*M]
779
780    #Write obj for bathymetry
781    FN = create_filename('.', basefilename, 'obj', size)
782    write_obj(FN,xx,yy,zz)
783
784
785    #Now read all the data with variable information, combine with
786    #x,y info and store as obj
787
788    for k in range(len(time)):
789        t = time[k]
790        print 'Processing timestep %f' %t
791
792        for i in range(M):
793            for j in range(3):
794                zz[i,j] = stage[k,i+j*M]
795
796
797        #Write obj for variable data
798        #FN = create_filename(basefilename, 'obj', size, time=t)
799        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
800        write_obj(FN,xx,yy,zz)
801
802
803def dat2obj(basefilename):
804    """Convert line based data output to obj
805    FIXME: Obsolete?
806    """
807
808    import glob, os
809    from config import data_dir
810
811
812    #Get bathymetry and x,y's
813    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()
814
815    from Numeric import zeros, Float
816
817    M = len(lines)  #Number of lines
818    x = zeros((M,3), Float)
819    y = zeros((M,3), Float)
820    z = zeros((M,3), Float)
821
822    ##i = 0
823    for i, line in enumerate(lines):
824        tokens = line.split()
825        values = map(float,tokens)
826
827        for j in range(3):
828            x[i,j] = values[j*3]
829            y[i,j] = values[j*3+1]
830            z[i,j] = values[j*3+2]
831
832        ##i += 1
833
834
835    #Write obj for bathymetry
836    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)
837
838
839    #Now read all the data files with variable information, combine with
840    #x,y info
841    #and store as obj
842
843    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')
844
845    for filename in files:
846        print 'Processing %s' % filename
847
848        lines = open(data_dir+os.sep+filename,'r').readlines()
849        assert len(lines) == M
850        root, ext = os.path.splitext(filename)
851
852        #Get time from filename
853        i0 = filename.find('_time=')
854        if i0 == -1:
855            #Skip bathymetry file
856            continue
857
858        i0 += 6  #Position where time starts
859        i1 = filename.find('.dat')
860
861        if i1 > i0:
862            t = float(filename[i0:i1])
863        else:
864            raise 'Hmmmm'
865
866
867
868        ##i = 0
869        for i, line in enumerate(lines):
870            tokens = line.split()
871            values = map(float,tokens)
872
873            for j in range(3):
874                z[i,j] = values[j]
875
876            ##i += 1
877
878        #Write obj for variable data
879        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
880
881
882def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
883    """Read netcdf filename1, pick timesteps first:step:last and save to
884    nettcdf file filename2
885    """
886    from Scientific.IO.NetCDF import NetCDFFile
887
888    #Get NetCDF
889    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
890    outfile = NetCDFFile(filename2, 'w')  #Open new file
891
892
893    #Copy dimensions
894    for d in infile.dimensions:
895        outfile.createDimension(d, infile.dimensions[d])
896
897    for name in infile.variables:
898        var = infile.variables[name]
899        outfile.createVariable(name, var.typecode(), var.dimensions)
900
901
902    #Copy the static variables
903    for name in infile.variables:
904        if name == 'time' or name == 'stage':
905            pass
906        else:
907            #Copy
908            outfile.variables[name][:] = infile.variables[name][:]
909
910    #Copy selected timesteps
911    time = infile.variables['time']
912    stage = infile.variables['stage']
913
914    newtime = outfile.variables['time']
915    newstage = outfile.variables['stage']
916
917    if last is None:
918        last = len(time)
919
920    selection = range(first, last, step)
921    for i, j in enumerate(selection):
922        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
923        newtime[i] = time[j]
924        newstage[i,:] = stage[j,:]
925
926    #Close
927    infile.close()
928    outfile.close()
929
930
931#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename

    The writer class is looked up by name: Data_format_<domain.format>,
    e.g. format 'sww' maps to Data_format_sww.
    """

    #Look the class up in the module namespace instead of using eval -
    #same result for valid formats, but no arbitrary code execution if
    #domain.format ever comes from untrusted input.
    #(Unknown formats now raise KeyError rather than NameError.)
    cls = globals()['Data_format_%s' %domain.format]
    return cls(domain, mode)
938
939def xya2pts(basename_in, basename_out=None, verbose=False,
940            #easting_min=None, easting_max=None,
941            #northing_min=None, northing_max=None,
942            stride = 1,           
943            attribute_name = 'elevation',
944            z_func = None):
945    """Read points data from ascii (.xya)
946
947    Example:
948
949              x(m)        y(m)        z(m)
950         0.00000e+00  0.00000e+00  1.3535000e-01
951         0.00000e+00  1.40000e-02  1.3535000e-01
952
953   
954
955    Convert to NetCDF pts format which is
956
957    points:  (Nx2) Float array
958    elevation: N Float array
959
960    Only lines that contain three numeric values are processed
961
962    If z_func is specified, it will be applied to the third field
963    """
964
965    import os 
966    #from Scientific.IO.NetCDF import NetCDFFile
967    from Numeric import Float, arrayrange, concatenate
968
969    root, ext = os.path.splitext(basename_in)
970
971    if ext == '': ext = '.xya'
972
973    #Get NetCDF
974    infile = open(root + ext, 'r')  #Open existing xya file for read
975
976    if verbose: print 'Reading xya points from %s' %(root + ext)
977
978    points = []
979    attribute = []
980    for i, line in enumerate(infile.readlines()):
981
982        if i % stride != 0: continue
983       
984        fields = line.split()
985
986        try:
987            assert len(fields) == 3
988        except:   
989            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line) 
990
991        try:
992            x = float( fields[0] )
993            y = float( fields[1] )
994            z = float( fields[2] )           
995        except:
996            continue
997
998        points.append( [x, y] )
999
1000        if callable(z_func):
1001            attribute.append(z_func(z))           
1002        else:   
1003            attribute.append(z)
1004
1005
1006    #Get output file
1007    if basename_out == None:
1008        ptsname = root + '.pts'
1009    else:
1010        ptsname = basename_out + '.pts'
1011
1012    if verbose: print 'Store to NetCDF file %s' %ptsname
1013    write_ptsfile(ptsname, points, attribute, attribute_name)
1014
1015
def write_ptsfile(ptsname, points, attribute, attribute_name = None):
    """Store point coordinates and one associated attribute in the
    NetCDF pts format.

    ptsname: name of output file (typically with .pts extension)
    points: sequence of (x, y) coordinate pairs
    attribute: sequence of attribute values, one per point
    attribute_name: name under which the attribute is stored in the
                    NetCDF file. Defaults to 'attribute'.

    A default (trivial) georeference is attached to the file.
    """

    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'

    #Create new NetCDF file
    outfile = NetCDFFile(ptsname, 'w')

    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'

    #Attach default georeferencing information
    from coordinate_transforms.geo_reference import Geo_reference
    Geo_reference().write_NetCDF(outfile)

    #Dimensions: N points living in two spatial dimensions
    number_of_points = len(points)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    #Variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    #Store data and close
    outfile.variables['points'][:, :] = points
    outfile.variables[attribute_name][:] = attribute

    outfile.close()
1060   
1061
def dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    basename_in: stem of input file. basename_in + '.dem' is read.
    basename_out: stem of output file. Defaults to basename_in.
                  basename_out + '.pts' is written.
    verbose: if True, print progress information.
    easting_min, easting_max, northing_min, northing_max: optional
        absolute UTM bounds restricting which grid cells are stored.
        Each defaults to the corresponding edge of the full grid.

    Note: only rows fully contained in the bounding box are written;
    the final assert fails if any row is only partially covered.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    #Grid metadata (stored as one-element arrays in the .dem file)
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    #NOTE(review): these offsets are computed but never used below
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing: the output origin is the lower left corner of the
    #(possibly cropped) bounding box, not of the original grid
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows


    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
    outfile.createDimension('number_of_points', nrows_in_bounding_box*ncols_in_bounding_box)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    #FIXME: Could perhaps be faster using array operations (Fixed 27/7/05)
    global_index = 0
    for i in range(nrows):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index
        #Per-row buffers; only flushed to file if the whole row is inside
        tpoints = zeros((ncols_in_bounding_box, 2), Float)
        telev =  zeros(ncols_in_bounding_box, Float)
        local_index = 0

        #Northing of this row: i == 0 is the northern-most (top) row.
        #NOTE(review): (nrows-i) rather than (nrows-1-i) places the top row
        #one full cellsize above yllcorner + (nrows-1)*cellsize - confirm
        #this matches the intended grid registration
        y = (nrows-i)*cellsize + yllcorner
        for j in range(ncols):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                #Store coordinates relative to the new lower left corner
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        #Only fully covered rows are written; a partially covered row is
        #still counted in global_index, which would trigger the assert below
        if upper_index == lower_index + ncols_in_bounding_box:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nrows_in_bounding_box*ncols_in_bounding_box, 'index not equal to number of points'

    infile.close()
    outfile.close()
1205
1206
1207
1208def sww2dem(basename_in, basename_out = None,
1209            quantity = None,
1210            timestep = None,
1211            reduction = None,
1212            cellsize = 10,
1213            easting_min = None,
1214            easting_max = None,
1215            northing_min = None,
1216            northing_max = None,         
1217            verbose = False,
1218            origin = None,
1219            datum = 'WGS84',
1220            format = 'ers'):
1221   
1222    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1223
1224    Example (ASC):
1225
1226    ncols         3121
1227    nrows         1800
1228    xllcorner     722000
1229    yllcorner     5893000
1230    cellsize      25
1231    NODATA_value  -9999
1232    138.3698 137.4194 136.5062 135.5558 ..........
1233
1234    Also write accompanying file with same basename_in but extension .prj
1235    used to fix the UTM zone, datum, false northings and eastings.
1236
1237    The prj format is assumed to be as
1238
1239    Projection    UTM
1240    Zone          56
1241    Datum         WGS84
1242    Zunits        NO
1243    Units         METERS
1244    Spheroid      WGS84
1245    Xshift        0.0000000000
1246    Yshift        10000000.0000000000
1247    Parameters
1248
1249
1250    The parameter quantity must be the name of an existing quantity or
1251    an expression involving existing quantities. The default is
1252    'elevation'.
1253
1254    if timestep (an index) is given, output quantity at that timestep
1255
1256    if reduction is given use that to reduce quantity over all timesteps.
1257
1258    datum
1259   
1260    format can be either 'asc' or 'ers'
1261    """
1262
1263    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1264    from utilities.polygon import inside_polygon
1265    from util import apply_expression_to_dictionary
1266   
1267    msg = 'Format must be either asc or ers'
1268    assert format.lower() in ['asc', 'ers'], msg
1269   
1270    false_easting = 500000
1271    false_northing = 10000000
1272
1273    if quantity is None:
1274        quantity = 'elevation'
1275
1276    if reduction is None:
1277        reduction = max
1278
1279    if basename_out is None:
1280        basename_out = basename_in + '_%s' %quantity
1281 
1282    swwfile = basename_in + '.sww'
1283    demfile = basename_out + '.' + format
1284    # Note the use of a .ers extension is optional (write_ermapper_grid will
1285    # deal with either option
1286
1287    #Read sww file
1288    if verbose: print 'Reading from %s' %swwfile
1289    from Scientific.IO.NetCDF import NetCDFFile
1290    fid = NetCDFFile(swwfile)
1291
1292    #Get extent and reference
1293    x = fid.variables['x'][:]
1294    y = fid.variables['y'][:]
1295    volumes = fid.variables['volumes'][:]
1296
1297    number_of_timesteps = fid.dimensions['number_of_timesteps']
1298    number_of_points = fid.dimensions['number_of_points']
1299    if origin is None:
1300
1301        #Get geo_reference
1302        #sww files don't have to have a geo_ref
1303        try:
1304            geo_reference = Geo_reference(NetCDFObject=fid)
1305        except AttributeError, e:
1306            geo_reference = Geo_reference() #Default georef object
1307
1308        xllcorner = geo_reference.get_xllcorner()
1309        yllcorner = geo_reference.get_yllcorner()
1310        zone = geo_reference.get_zone()
1311    else:
1312        zone = origin[0]
1313        xllcorner = origin[1]
1314        yllcorner = origin[2]
1315
1316
1317
1318    #Get quantity and reduce if applicable
1319    if verbose: print 'Processing quantity %s' %quantity
1320
1321    #Turn NetCDF objects into Numeric arrays
1322    quantity_dict = {}
1323    for name in fid.variables.keys():
1324        quantity_dict[name] = fid.variables[name][:]
1325
1326    #print quantity_dict     
1327   
1328
1329    q = apply_expression_to_dictionary(quantity, quantity_dict)
1330
1331
1332    if len(q.shape) == 2:
1333        #q has a time component and needs to be reduced along
1334        #the temporal dimension
1335        if verbose: print 'Reducing quantity %s' %quantity
1336        q_reduced = zeros( number_of_points, Float )
1337
1338        for k in range(number_of_points):
1339            q_reduced[k] = reduction( q[:,k] )
1340
1341        q = q_reduced
1342
1343    #Post condition: Now q has dimension: number_of_points
1344    assert len(q.shape) == 1
1345    assert q.shape[0] == number_of_points   
1346   
1347
1348    #Create grid and update xll/yll corner and x,y
1349
1350    #Relative extent
1351    if easting_min is None:
1352        xmin = min(x)
1353    else:
1354        xmin = easting_min - xllcorner
1355
1356    if easting_max is None:
1357        xmax = max(x)
1358    else:
1359        xmax = easting_max - xllcorner
1360       
1361    if northing_min is None:       
1362        ymin = min(y)
1363    else:
1364        ymin = northing_min - yllcorner
1365       
1366    if northing_max is None:       
1367        ymax = max(y)
1368    else:
1369        ymax = northing_max - yllcorner
1370       
1371       
1372   
1373    if verbose: print 'Creating grid'
1374    ncols = int((xmax-xmin)/cellsize)+1
1375    nrows = int((ymax-ymin)/cellsize)+1
1376
1377
1378    #New absolute reference and coordinates
1379    newxllcorner = xmin+xllcorner
1380    newyllcorner = ymin+yllcorner
1381
1382    x = x+xllcorner-newxllcorner
1383    y = y+yllcorner-newyllcorner
1384
1385    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1386    assert len(vertex_points.shape) == 2
1387
1388
1389    from Numeric import zeros, Float
1390    grid_points = zeros ( (ncols*nrows, 2), Float )
1391
1392
1393    for i in xrange(nrows):
1394        if format.lower() == 'asc':
1395            yg = i*cellsize
1396        else:   
1397            #this will flip the order of the y values for ers
1398            yg = (nrows-i)*cellsize
1399           
1400        for j in xrange(ncols):
1401            xg = j*cellsize
1402            k = i*ncols + j
1403
1404            grid_points[k,0] = xg
1405            grid_points[k,1] = yg
1406
1407    #Interpolate
1408    from least_squares import Interpolation
1409   
1410
1411    #FIXME: This should be done with precrop = True (?), otherwise it'll
1412    #take forever. With expand_search  set to False, some grid points might
1413    #miss out.... This will be addressed though Duncan's refactoring of least_squares
1414
1415    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
1416                           precrop = False, expand_search = False,
1417                           verbose = verbose)
1418
1419    #Interpolate using quantity values
1420    if verbose: print 'Interpolating'
1421    grid_values = interp.interpolate(q).flat
1422
1423    if format.lower() == 'ers':
1424        # setup ERS header information
1425        grid_values = reshape(grid_values,(nrows, ncols))   
1426        NODATA_value = 0
1427        header = {}
1428        header['datum'] = '"' + datum + '"'
1429        # FIXME The use of hardwired UTM and zone number needs to be made optional
1430        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
1431        header['projection'] = '"UTM-' + str(zone) + '"'
1432        header['coordinatetype'] = 'EN'
1433        if header['coordinatetype'] == 'LL':
1434            header['longitude'] = str(newxllcorner)
1435            header['latitude'] = str(newyllcorner)
1436        elif header['coordinatetype'] == 'EN':
1437            header['eastings'] = str(newxllcorner)
1438            header['northings'] = str(newyllcorner)
1439        header['nullcellvalue'] = str(NODATA_value)
1440        header['xdimension'] = str(cellsize)
1441        header['ydimension'] = str(cellsize)
1442        header['value'] = '"' + quantity + '"'
1443
1444
1445        #Write
1446        if verbose: print 'Writing %s' %demfile
1447        import ermapper_grids
1448        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
1449
1450        fid.close()   
1451    else:
1452        #Write to Ascii format
1453       
1454        #Write prj file
1455        prjfile = basename_out + '.prj' 
1456       
1457        if verbose: print 'Writing %s' %prjfile
1458        prjid = open(prjfile, 'w')
1459        prjid.write('Projection    %s\n' %'UTM')
1460        prjid.write('Zone          %d\n' %zone)
1461        prjid.write('Datum         %s\n' %datum)
1462        prjid.write('Zunits        NO\n')
1463        prjid.write('Units         METERS\n')
1464        prjid.write('Spheroid      %s\n' %datum)
1465        prjid.write('Xshift        %d\n' %false_easting)
1466        prjid.write('Yshift        %d\n' %false_northing)
1467        prjid.write('Parameters\n')
1468        prjid.close()
1469
1470
1471   
1472        if verbose: print 'Writing %s' %ascfile
1473        NODATA_value = -9999
1474 
1475        ascid = open(demfile, 'w')
1476
1477        ascid.write('ncols         %d\n' %ncols)
1478        ascid.write('nrows         %d\n' %nrows)
1479        ascid.write('xllcorner     %d\n' %newxllcorner)
1480        ascid.write('yllcorner     %d\n' %newyllcorner)
1481        ascid.write('cellsize      %f\n' %cellsize)
1482        ascid.write('NODATA_value  %d\n' %NODATA_value)
1483
1484
1485        #Get bounding polygon from mesh
1486        P = interp.mesh.get_boundary_polygon()
1487        inside_indices = inside_polygon(grid_points, P)
1488
1489        for i in range(nrows):
1490            if verbose and i%((nrows+10)/10)==0:
1491                print 'Doing row %d of %d' %(i, nrows)
1492
1493            for j in range(ncols):
1494                index = (nrows-i-1)*ncols+j
1495
1496                if sometrue(inside_indices == index):
1497                    ascid.write('%f ' %grid_values[index])
1498                else:
1499                    ascid.write('%d ' %NODATA_value)
1500
1501            ascid.write('\n')
1502
1503        #Close
1504        ascid.close()
1505        fid.close()
1506
1507#Backwards compatibility       
1508def sww2asc(basename_in, basename_out = None,
1509            quantity = None,
1510            timestep = None,
1511            reduction = None,
1512            cellsize = 10,
1513            verbose = False,
1514            origin = None):
1515    print 'sww2asc will soon be obsoleted - please use sww2dem'
1516    sww2dem(basename_in,
1517            basename_out = basename_out,
1518            quantity = quantity,
1519            timestep = timestep,
1520            reduction = reduction,
1521            cellsize = cellsize,
1522            verbose = verbose,
1523            origin = origin,
1524            datum = 'WGS84',
1525            format = 'asc')
1526           
1527def sww2ers(basename_in, basename_out = None,
1528            quantity = None,
1529            timestep = None,
1530            reduction = None,
1531            cellsize = 10,
1532            verbose = False,
1533            origin = None,
1534            datum = 'WGS84'):
1535    print 'sww2ers will soon be obsoleted - please use sww2dem'
1536    sww2dem(basename_in, 
1537            basename_out = basename_out,
1538            quantity = quantity,
1539            timestep = timestep,
1540            reduction = reduction,
1541            cellsize = cellsize,
1542            verbose = verbose,
1543            origin = origin,
1544            datum = datum,
1545            format = 'ers')         
1546################################# END COMPATIBILITY ##############         
1547
1548
1549
1550
1551def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1552                                  verbose=False):
1553    """Read Digitial Elevation model from the following ASCII format (.asc)
1554
1555    Example:
1556
1557    ncols         3121
1558    nrows         1800
1559    xllcorner     722000
1560    yllcorner     5893000
1561    cellsize      25
1562    NODATA_value  -9999
1563    138.3698 137.4194 136.5062 135.5558 ..........
1564
1565    Convert basename_in + '.asc' to NetCDF format (.dem)
1566    mimicking the ASCII format closely.
1567
1568
1569    An accompanying file with same basename_in but extension .prj must exist
1570    and is used to fix the UTM zone, datum, false northings and eastings.
1571
1572    The prj format is assumed to be as
1573
1574    Projection    UTM
1575    Zone          56
1576    Datum         WGS84
1577    Zunits        NO
1578    Units         METERS
1579    Spheroid      WGS84
1580    Xshift        0.0000000000
1581    Yshift        10000000.0000000000
1582    Parameters
1583    """
1584
1585    import os
1586    from Scientific.IO.NetCDF import NetCDFFile
1587    from Numeric import Float, array
1588
1589    #root, ext = os.path.splitext(basename_in)
1590    root = basename_in
1591
1592    ###########################################
1593    # Read Meta data
1594    if verbose: print 'Reading METADATA from %s' %root + '.prj'
1595    metadatafile = open(root + '.prj')
1596    metalines = metadatafile.readlines()
1597    metadatafile.close()
1598
1599    L = metalines[0].strip().split()
1600    assert L[0].strip().lower() == 'projection'
1601    projection = L[1].strip()                   #TEXT
1602
1603    L = metalines[1].strip().split()
1604    assert L[0].strip().lower() == 'zone'
1605    zone = int(L[1].strip())
1606
1607    L = metalines[2].strip().split()
1608    assert L[0].strip().lower() == 'datum'
1609    datum = L[1].strip()                        #TEXT
1610
1611    L = metalines[3].strip().split()
1612    assert L[0].strip().lower() == 'zunits'     #IGNORE
1613    zunits = L[1].strip()                       #TEXT
1614
1615    L = metalines[4].strip().split()
1616    assert L[0].strip().lower() == 'units'
1617    units = L[1].strip()                        #TEXT
1618
1619    L = metalines[5].strip().split()
1620    assert L[0].strip().lower() == 'spheroid'   #IGNORE
1621    spheroid = L[1].strip()                     #TEXT
1622
1623    L = metalines[6].strip().split()
1624    assert L[0].strip().lower() == 'xshift'
1625    false_easting = float(L[1].strip())
1626
1627    L = metalines[7].strip().split()
1628    assert L[0].strip().lower() == 'yshift'
1629    false_northing = float(L[1].strip())
1630
1631    #print false_easting, false_northing, zone, datum
1632
1633
1634    ###########################################
1635    #Read DEM data
1636
1637    datafile = open(basename_in + '.asc')
1638
1639    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
1640    lines = datafile.readlines()
1641    datafile.close()
1642
1643    if verbose: print 'Got', len(lines), ' lines'
1644
1645    ncols = int(lines[0].split()[1].strip())
1646    nrows = int(lines[1].split()[1].strip())
1647    xllcorner = float(lines[2].split()[1].strip())
1648    yllcorner = float(lines[3].split()[1].strip())
1649    cellsize = float(lines[4].split()[1].strip())
1650    NODATA_value = int(lines[5].split()[1].strip())
1651
1652    assert len(lines) == nrows + 6
1653
1654
1655    ##########################################
1656
1657
1658    if basename_out == None:
1659        netcdfname = root + '.dem'
1660    else:
1661        netcdfname = basename_out + '.dem'
1662
1663    if verbose: print 'Store to NetCDF file %s' %netcdfname
1664    # NetCDF file definition
1665    fid = NetCDFFile(netcdfname, 'w')
1666
1667    #Create new file
1668    fid.institution = 'Geoscience Australia'
1669    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
1670                      'of spatial point data'
1671
1672    fid.ncols = ncols
1673    fid.nrows = nrows
1674    fid.xllcorner = xllcorner
1675    fid.yllcorner = yllcorner
1676    fid.cellsize = cellsize
1677    fid.NODATA_value = NODATA_value
1678
1679    fid.zone = zone
1680    fid.false_easting = false_easting
1681    fid.false_northing = false_northing
1682    fid.projection = projection
1683    fid.datum = datum
1684    fid.units = units
1685
1686
1687    # dimension definitions
1688    fid.createDimension('number_of_rows', nrows)
1689    fid.createDimension('number_of_columns', ncols)
1690
1691    # variable definitions
1692    fid.createVariable('elevation', Float, ('number_of_rows',
1693                                            'number_of_columns'))
1694
1695    # Get handles to the variables
1696    elevation = fid.variables['elevation']
1697
1698    #Store data
1699    n = len(lines[6:])
1700    for i, line in enumerate(lines[6:]):
1701        fields = line.split()
1702        if verbose and i%((n+10)/10)==0:
1703            print 'Processing row %d of %d' %(i, nrows)           
1704
1705        elevation[i, :] = array([float(x) for x in fields])
1706
1707    fid.close()
1708
1709
1710
1711
1712
1713def ferret2sww(basename_in, basename_out = None,
1714               verbose = False,
1715               minlat = None, maxlat = None,
1716               minlon = None, maxlon = None,
1717               mint = None, maxt = None, mean_stage = 0,
1718               origin = None, zscale = 1,
1719               fail_on_NaN = True,
1720               NaN_filler = 0,
1721               elevation = None,
1722               inverted_bathymetry = False
1723               ): #FIXME: Bathymetry should be obtained
1724                                  #from MOST somehow.
1725                                  #Alternatively from elsewhere
1726                                  #or, as a last resort,
1727                                  #specified here.
1728                                  #The value of -100 will work
1729                                  #for the Wollongong tsunami
1730                                  #scenario but is very hacky
1731    """Convert 'Ferret' NetCDF format for wave propagation to
1732    sww format native to pyvolution.
1733
1734    Specify only basename_in and read files of the form
1735    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
1736    relative height, x-velocity and y-velocity, respectively.
1737
1738    Also convert latitude and longitude to UTM. All coordinates are
1739    assumed to be given in the GDA94 datum.
1740
1741    min's and max's: If omitted - full extend is used.
1742    To include a value min may equal it, while max must exceed it.
1743    Lat and lon are assuemd to be in decimal degrees
1744
1745    origin is a 3-tuple with geo referenced
1746    UTM coordinates (zone, easting, northing)
1747
1748    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
1749    which means that longitude is the fastest
1750    varying dimension (row major order, so to speak)
1751
1752    ferret2sww uses grid points as vertices in a triangular grid
1753    counting vertices from lower left corner upwards, then right
1754    """
1755
1756    import os
1757    from Scientific.IO.NetCDF import NetCDFFile
1758    from Numeric import Float, Int, Int32, searchsorted, zeros, array
1759    from Numeric import allclose, around
1760       
1761    precision = Float
1762
1763
1764    #Get NetCDF data
1765    if verbose: print 'Reading files %s_*.nc' %basename_in
1766    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
1767    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
1768    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
1769    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
1770
1771    if basename_out is None:
1772        swwname = basename_in + '.sww'
1773    else:
1774        swwname = basename_out + '.sww'
1775
1776    times = file_h.variables['TIME']
1777    latitudes = file_h.variables['LAT']
1778    longitudes = file_h.variables['LON']
1779
1780
1781
1782    #Precision used by most for lat/lon is 4 or 5 decimals
1783    e_lat = around(file_e.variables['LAT'][:], 5)
1784    e_lon = around(file_e.variables['LON'][:], 5)
1785
1786    #Check that files are compatible
1787    assert allclose(latitudes, file_u.variables['LAT'])
1788    assert allclose(latitudes, file_v.variables['LAT'])
1789    assert allclose(latitudes, e_lat)
1790
1791    assert allclose(longitudes, file_u.variables['LON'])
1792    assert allclose(longitudes, file_v.variables['LON'])
1793    assert allclose(longitudes, e_lon)   
1794
1795
1796
1797    if mint == None:
1798        jmin = 0
1799    else:
1800        jmin = searchsorted(times, mint)
1801
1802    if maxt == None:
1803        jmax=len(times)
1804    else:
1805        jmax = searchsorted(times, maxt)
1806
1807    if minlat == None:
1808        kmin=0
1809    else:
1810        kmin = searchsorted(latitudes, minlat)
1811
1812    if maxlat == None:
1813        kmax = len(latitudes)
1814    else:
1815        kmax = searchsorted(latitudes, maxlat)
1816
1817    if minlon == None:
1818        lmin=0
1819    else:
1820        lmin = searchsorted(longitudes, minlon)
1821
1822    if maxlon == None:
1823        lmax = len(longitudes)
1824    else:
1825        lmax = searchsorted(longitudes, maxlon)
1826
1827
1828
1829    times = times[jmin:jmax]
1830    latitudes = latitudes[kmin:kmax]
1831    longitudes = longitudes[lmin:lmax]
1832
1833
1834    if verbose: print 'cropping'
1835    zname = 'ELEVATION'
1836
1837   
1838    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
1839    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
1840    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
1841    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
1842
1843#    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
1844#        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
1845#    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
1846#        from Numeric import asarray
1847#        elevations=elevations.tolist()
1848#        elevations.reverse()
1849#        elevations=asarray(elevations)
1850#    else:
1851#        from Numeric import asarray
1852#        elevations=elevations.tolist()
1853#        elevations.reverse()
1854#        elevations=asarray(elevations)
1855#        'print hmmm'
1856
1857
1858
1859    #Get missing values
1860    nan_ha = file_h.variables['HA'].missing_value[0]
1861    nan_ua = file_u.variables['UA'].missing_value[0]
1862    nan_va = file_v.variables['VA'].missing_value[0]
1863    if hasattr(file_e.variables[zname],'missing_value'):
1864        nan_e  = file_e.variables[zname].missing_value[0]
1865    else:
1866        nan_e = None
1867
1868    #Cleanup
1869    from Numeric import sometrue
1870
1871    missing = (amplitudes == nan_ha)
1872    if sometrue (missing):
1873        if fail_on_NaN:
1874            msg = 'NetCDFFile %s contains missing values'\
1875                  %(basename_in+'_ha.nc')
1876            raise msg
1877        else:
1878            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
1879
1880    missing = (uspeed == nan_ua)
1881    if sometrue (missing):
1882        if fail_on_NaN:
1883            msg = 'NetCDFFile %s contains missing values'\
1884                  %(basename_in+'_ua.nc')
1885            raise msg
1886        else:
1887            uspeed = uspeed*(missing==0) + missing*NaN_filler
1888
1889    missing = (vspeed == nan_va)
1890    if sometrue (missing):
1891        if fail_on_NaN:
1892            msg = 'NetCDFFile %s contains missing values'\
1893                  %(basename_in+'_va.nc')
1894            raise msg
1895        else:
1896            vspeed = vspeed*(missing==0) + missing*NaN_filler
1897
1898
1899    missing = (elevations == nan_e)
1900    if sometrue (missing):
1901        if fail_on_NaN:
1902            msg = 'NetCDFFile %s contains missing values'\
1903                  %(basename_in+'_e.nc')
1904            raise msg
1905        else:
1906            elevations = elevations*(missing==0) + missing*NaN_filler
1907
1908    #######
1909
1910
1911
1912    number_of_times = times.shape[0]
1913    number_of_latitudes = latitudes.shape[0]
1914    number_of_longitudes = longitudes.shape[0]
1915
1916    assert amplitudes.shape[0] == number_of_times
1917    assert amplitudes.shape[1] == number_of_latitudes
1918    assert amplitudes.shape[2] == number_of_longitudes
1919
1920    if verbose:
1921        print '------------------------------------------------'
1922        print 'Statistics:'
1923        print '  Extent (lat/lon):'
1924        print '    lat in [%f, %f], len(lat) == %d'\
1925              %(min(latitudes.flat), max(latitudes.flat),
1926                len(latitudes.flat))
1927        print '    lon in [%f, %f], len(lon) == %d'\
1928              %(min(longitudes.flat), max(longitudes.flat),
1929                len(longitudes.flat))
1930        print '    t in [%f, %f], len(t) == %d'\
1931              %(min(times.flat), max(times.flat), len(times.flat))
1932
1933        q = amplitudes.flat
1934        name = 'Amplitudes (ha) [cm]'
1935        print %s in [%f, %f]' %(name, min(q), max(q))
1936
1937        q = uspeed.flat
1938        name = 'Speeds (ua) [cm/s]'
1939        print %s in [%f, %f]' %(name, min(q), max(q))
1940
1941        q = vspeed.flat
1942        name = 'Speeds (va) [cm/s]'
1943        print %s in [%f, %f]' %(name, min(q), max(q))
1944
1945        q = elevations.flat
1946        name = 'Elevations (e) [m]'
1947        print %s in [%f, %f]' %(name, min(q), max(q))
1948
1949
1950    #print number_of_latitudes, number_of_longitudes
1951    number_of_points = number_of_latitudes*number_of_longitudes
1952    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
1953
1954
1955    file_h.close()
1956    file_u.close()
1957    file_v.close()
1958    file_e.close()
1959
1960
1961    # NetCDF file definition
1962    outfile = NetCDFFile(swwname, 'w')
1963
1964    #Create new file
1965    outfile.institution = 'Geoscience Australia'
1966    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
1967                          %(basename_in + '_ha.nc',
1968                            basename_in + '_ua.nc',
1969                            basename_in + '_va.nc',
1970                            basename_in + '_e.nc')
1971
1972
1973    #For sww compatibility
1974    outfile.smoothing = 'Yes'
1975    outfile.order = 1
1976
1977    #Start time in seconds since the epoch (midnight 1/1/1970)
1978    outfile.starttime = starttime = times[0]
1979    times = times - starttime  #Store relative times
1980
1981    # dimension definitions
1982    outfile.createDimension('number_of_volumes', number_of_volumes)
1983
1984    outfile.createDimension('number_of_vertices', 3)
1985    outfile.createDimension('number_of_points', number_of_points)
1986
1987
1988    #outfile.createDimension('number_of_timesteps', len(times))
1989    outfile.createDimension('number_of_timesteps', len(times))
1990
1991    # variable definitions
1992    outfile.createVariable('x', precision, ('number_of_points',))
1993    outfile.createVariable('y', precision, ('number_of_points',))
1994    outfile.createVariable('elevation', precision, ('number_of_points',))
1995
1996    #FIXME: Backwards compatibility
1997    outfile.createVariable('z', precision, ('number_of_points',))
1998    #################################
1999
2000    outfile.createVariable('volumes', Int, ('number_of_volumes',
2001                                            'number_of_vertices'))
2002
2003    outfile.createVariable('time', precision,
2004                           ('number_of_timesteps',))
2005
2006    outfile.createVariable('stage', precision,
2007                           ('number_of_timesteps',
2008                            'number_of_points'))
2009
2010    outfile.createVariable('xmomentum', precision,
2011                           ('number_of_timesteps',
2012                            'number_of_points'))
2013
2014    outfile.createVariable('ymomentum', precision,
2015                           ('number_of_timesteps',
2016                            'number_of_points'))
2017
2018
2019    #Store
2020    from coordinate_transforms.redfearn import redfearn
2021    x = zeros(number_of_points, Float)  #Easting
2022    y = zeros(number_of_points, Float)  #Northing
2023
2024
2025    if verbose: print 'Making triangular grid'
2026    #Check zone boundaries
2027    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2028
2029    vertices = {}
2030    i = 0
2031    for k, lat in enumerate(latitudes):       #Y direction
2032        for l, lon in enumerate(longitudes):  #X direction
2033
2034            vertices[l,k] = i
2035
2036            zone, easting, northing = redfearn(lat,lon)
2037
2038            msg = 'Zone boundary crossed at longitude =', lon
2039            #assert zone == refzone, msg
2040            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2041            x[i] = easting
2042            y[i] = northing
2043            i += 1
2044
2045
2046    #Construct 2 triangles per 'rectangular' element
2047    volumes = []
2048    for l in range(number_of_longitudes-1):    #X direction
2049        for k in range(number_of_latitudes-1): #Y direction
2050            v1 = vertices[l,k+1]
2051            v2 = vertices[l,k]
2052            v3 = vertices[l+1,k+1]
2053            v4 = vertices[l+1,k]
2054
2055            volumes.append([v1,v2,v3]) #Upper element
2056            volumes.append([v4,v3,v2]) #Lower element
2057
2058    volumes = array(volumes)
2059
2060    if origin == None:
2061        zone = refzone
2062        xllcorner = min(x)
2063        yllcorner = min(y)
2064    else:
2065        zone = origin[0]
2066        xllcorner = origin[1]
2067        yllcorner = origin[2]
2068
2069
2070    outfile.xllcorner = xllcorner
2071    outfile.yllcorner = yllcorner
2072    outfile.zone = zone
2073
2074
2075    if elevation is not None:
2076        z = elevation
2077    else:
2078        if inverted_bathymetry:
2079            z = -1*elevations
2080        else:
2081            z = elevations
2082        #FIXME: z should be obtained from MOST and passed in here
2083
2084    from Numeric import resize
2085    z = resize(z,outfile.variables['z'][:].shape)
2086    outfile.variables['x'][:] = x - xllcorner
2087    outfile.variables['y'][:] = y - yllcorner
2088    outfile.variables['z'][:] = z
2089    outfile.variables['elevation'][:] = z  #FIXME HACK
2090    outfile.variables['time'][:] = times   #Store time relative
2091    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2092
2093
2094
2095    #Time stepping
2096    stage = outfile.variables['stage']
2097    xmomentum = outfile.variables['xmomentum']
2098    ymomentum = outfile.variables['ymomentum']
2099
2100    if verbose: print 'Converting quantities'
2101    n = len(times)
2102    for j in range(n):
2103        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2104        i = 0
2105        for k in range(number_of_latitudes):      #Y direction
2106            for l in range(number_of_longitudes): #X direction
2107                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2108                stage[j,i] = w
2109                h = w - z[i]
2110                xmomentum[j,i] = uspeed[j,k,l]/100*h
2111                ymomentum[j,i] = vspeed[j,k,l]/100*h
2112                i += 1
2113
2114
2115    if verbose:
2116        x = outfile.variables['x'][:]
2117        y = outfile.variables['y'][:]
2118        print '------------------------------------------------'
2119        print 'Statistics of output file:'
2120        print '  Name: %s' %swwname
2121        print '  Reference:'
2122        print '    Lower left corner: [%f, %f]'\
2123              %(xllcorner, yllcorner)
2124        print '    Start time: %f' %starttime
2125        print '  Extent:'
2126        print '    x [m] in [%f, %f], len(x) == %d'\
2127              %(min(x.flat), max(x.flat), len(x.flat))
2128        print '    y [m] in [%f, %f], len(y) == %d'\
2129              %(min(y.flat), max(y.flat), len(y.flat))
2130        print '    t [s] in [%f, %f], len(t) == %d'\
2131              %(min(times), max(times), len(times))
2132        print '  Quantities [SI units]:'
2133        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2134            q = outfile.variables[name][:].flat
2135            print '    %s in [%f, %f]' %(name, min(q), max(q))
2136
2137
2138
2139
2140    outfile.close()
2141
2142
2143
2144
def timefile2netcdf(filename, quantity_names = None):
    """Convert a text file with time series data to a NetCDF tms file.

    The file format is assumed to be two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname: input is read from
    filename + '.txt' and output is written to filename + '.tms'.

    quantity_names optionally gives the name stored for each column;
    columns without a corresponding entry are named 'Attribute<i>'.

    Times must form a strictly increasing sequence.
    """

    import time, calendar
    from Numeric import array, zeros, Float, alltrue
    from config import time_format
    from util import ensure_numeric


    #Read first line to establish the start time and number of columns
    fid = open(filename + '.txt')
    line = fid.readline()
    fid.close()

    fields = line.split(',')
    #FIX: filename was never interpolated into the message (literal '%s')
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg

    try:
        starttime = calendar.timegm(time.strptime(fields[0], time_format))
    except ValueError:
        msg = 'First field in file %s must be' %filename
        msg += ' date-time with format %s.\n' %time_format
        msg += 'I got %s instead.' %fields[0]
        raise msg


    #Split values of the first line to learn the number of attributes
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg


    #Read all lines: times (relative to starttime) and attribute values
    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    N = len(lines)  #Number of timesteps
    d = len(q)      #Number of values per timestep

    T = zeros(N, Float)       #Time
    Q = zeros((N, d), Float)  #Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        realtime = calendar.timegm(time.strptime(fields[0], time_format))

        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonically ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg


    #Create NetCDF file
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(filename + '.tms', 'w')


    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'


    #Reference point
    #Start time in seconds since the epoch (midnight 1/1/1970)
    #FIXME: Use Georef
    fid.starttime = starttime


    # dimension definitions
    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', Float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    #One NetCDF variable per attribute column
    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            #quantity_names is None or shorter than the number of columns
            name = 'Attribute%d'%i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()
2265
2266
def extent_sww(file_name):
    """Return the spatial and stage extent of an sww file.

    Input;
    file_name - the sww file

    Output;
    A list [xmin, xmax, ymin, ymax, stage_min, stage_max] where x and y
    are the vertex coordinates and stage is taken over all timesteps.
    """


    from Scientific.IO.NetCDF import NetCDFFile

    #Open NetCDF file for reading and pull out the required variables
    fid = NetCDFFile(file_name, 'r')

    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stages = fid.variables['stage'][:]

    fid.close()

    #Assemble [xmin, xmax, ymin, ymax, stage_min, stage_max]
    extent = [min(xs), max(xs), min(ys), max(ys)]
    extent.append(min(stages.flat))
    extent.append(max(stages.flat))
    return extent
2301
2302
def sww2domain(filename,boundary=None,t=None,\
               fail_if_NaN=True,NaN_filler=0\
               ,verbose = True,very_verbose = False):
    """Reconstruct a shallow_water Domain object from an sww file.

    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    filename -- name of sww file (including extension)
    boundary -- optional boundary dictionary to attach to the domain
    t -- time at which conserved quantities are interpolated
         (defaults to the last timestep in the file)
    fail_if_NaN -- if True, raise when a quantity contains missing values;
                   otherwise replace them with NaN_filler
    verbose, very_verbose -- progress and diagnostic printing

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.
    """
    NaN=9.969209968386869e+036
    #initialise NaN.
    #NOTE(review): presumably the netCDF default fill value for floats;
    #overridden below per-variable by its missing_value attribute if present.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
    time = fid.variables['time']       #Timesteps
    if t is None:
        t = time[-1]
    #Index and ratio for interpolating time-dependent quantities at t
    time_interp = get_time_interp(time,t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]             #x-coordinates of vertices
    y = fid.variables['y'][:]             #y-coordinates of vertices
    elevation = fid.variables['elevation']     #Elevation
    stage = fid.variables['stage']     #Water level
    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    #Stack x and y into an N x 2 coordinate array
    coordinates=transpose(asarray([x.tolist(),y.tolist()]))

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    #Classify variables: those with a time dimension are conserved
    #quantities (interpolated at t); the rest are static quantities.
    if verbose: print '    getting quantities'
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity]=\
                  interpolated_quantity(fid.variables[quantity][:],time_interp)
        else: other_quantities.append(quantity)

    #Coordinates, connectivity and time are not quantities
    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
#    From domain.Domain:
#    domain = Domain(coordinates, volumes,\
#                    conserved_quantities = conserved_quantities,\
#                    other_quantities = other_quantities,zone=zone,\
#                    xllcorner=xllcorner, yllcorner=yllcorner)

#   From shallow_water.Domain:
    coordinates=coordinates.tolist()
    volumes=volumes.tolist()
    #FIXME:should this be in mesh?(peter row)
    #Smoothed files store unique vertices, so duplicates must be weeded
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)


    domain = Domain(coordinates, volumes, boundary)

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    #Model starts at the interpolated time; domain clock restarts at zero
    domain.starttime=float(starttime)+float(t)
    domain.time=0.0

    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                #Replace missing values with NaN_filler
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Reshape flat vertex values to one row of 3 per triangle
            X = resize(X,(len(X)/3,3))
        domain.set_quantity(quantity,X)
#
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                #Replace missing values with NaN_filler
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            X = resize(X,(X.shape[0]/3,3))
        domain.set_quantity(quantity,X)
    fid.close()
    return domain
2445
def interpolated_quantity(saved_quantity,time_interp):
    """Interpolate a stored quantity in time.

    time_interp is an (index, ratio) pair as produced by get_time_interp.
    Returns saved_quantity[index] when ratio is zero, otherwise the
    linear blend of timesteps index and index+1.
    """
    idx, frac = time_interp
    if frac > 0:
        #Linear interpolation between the two bracketing timesteps
        result = (1-frac)*saved_quantity[idx] + frac*saved_quantity[idx+1]
    else:
        #Exactly on a stored timestep - no blending required
        result = saved_quantity[idx]
    return result
2457
def get_time_interp(time,t=None):
    """Find the (index, ratio) pair for time interpolation.

    If t is None the last stored timestep is selected (index -1, ratio 0).
    Otherwise index is chosen so that time[index] <= t <= time[index+1]
    and ratio is the fractional position of t in that interval (0 when t
    coincides with a stored time).  Raises if t lies outside the stored
    time interval.  (Borrowed from previous pyvolution code.)
    """
    if t is None:
        #Default: select the final timestep exactly
        return (-1, 0.)

    msg = 'Time interval derived from file %s [%s:%s]'\
        %('FIXMEfilename', time[0], time[-1])
    msg += ' does not match model time: %s' %t
    if t < time[0]: raise msg
    if t > time[-1]: raise msg

    #Walk forwards past t, then back to the bracketing index
    index = 0
    while t > time[index]: index += 1
    while t < time[index]: index -= 1

    if t == time[index]:
        #Protect against case where t == time[-1] (last time)
        # - also works in general when t == time[i]
        ratio = 0
    else:
        #t is now between index and index+1
        ratio = (t - time[index])/(time[index+1] - time[index])
    return (index, ratio)
2484
2485
def weed(coordinates,volumes,boundary = None):
    """Remove duplicate points, remapping volumes (and optionally
    boundary segments) to the weeded point numbering.

    coordinates -- list (or Numeric array) of [x, y] points
    volumes -- list (or Numeric array) of vertex-index triplets
    boundary -- optional dictionary mapping segments (i, j) to labels

    Returns (coordinates, volumes, boundary) with duplicate points removed
    and all indices renumbered consecutively.
    """
    #FIX: 'type(coordinates)=="array"' compared a type object to a string
    #and was always False, so Numeric arrays were never converted.
    #Detect array-like inputs by their tolist method instead.
    if hasattr(coordinates, 'tolist'):
        coordinates = coordinates.tolist()
    if hasattr(volumes, 'tolist'):
        volumes = volumes.tolist()

    #Map each distinct point to the index of its first occurrence and
    #record, for every original index, which point it refers to.
    unique = False
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point in point_dict:
            unique = True
            same_point[i]=point
            #to change all point i references to point j
        else:
            point_dict[point]=i
            same_point[i]=point

    #Rebuild the coordinate list from the surviving points and renumber
    #them consecutively.
    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point]=i
        i+=1

    #Remap volume vertex indices to the weeded numbering (in place)
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index>-1:
                volume[i]=point_dict[same_point[index]]

    new_boundary = {}
    if not boundary is None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)

            if (point0,point1) in new_boundary:
                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
                                              #+','+label

            elif (point1,point0) in new_boundary:
                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
                                              #+','+label
            else: new_boundary[(point0,point1)]=label

        boundary = new_boundary

    return coordinates,volumes,boundary
2541
2542
2543
2544
2545
2546
2547
2548
2549
2550
2551###############################################
2552#OBSOLETE STUFF
2553#Native checkpoint format.
2554#Information needed to recreate a state is preserved
2555#FIXME: Rethink and maybe use netcdf format
def cpt_variable_writer(filename, t, v0, v1, v2):
    """Write all conserved quantities (v0, v1, v2) for time t to a
    checkpoint (.cpt) file, one triangle per line with the values of the
    three quantities concatenated.
    """

    rows, cols = v0.shape

    outname = create_filename(filename, 'cpt', rows, t)
    #print 'Writing to %s' %outname

    fid = open(outname, 'w')
    for row in range(rows):
        #Emit v0 values, then v1, then v2, each as '%.16e '
        pieces = []
        for quantity in (v0, v1, v2):
            for col in range(cols):
                pieces.append('%.16e ' %quantity[row,col])
        pieces.append('\n')
        fid.write(''.join(pieces))
    fid.close()
2576
2577
def cpt_variable_reader(filename, t, v0, v1, v2):
    """Read all conserved quantities for time t from a checkpoint (.cpt)
    file back into the arrays v0, v1, v2 (filled in place).

    Inverse of cpt_variable_writer: each line holds N values of v0,
    followed by N values of v1, then N values of v2.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Reading from %s' %FN

    fid = open(FN)


    for i in range(M):
        values = fid.readline().split() #Get one line

        for j in range(N):
            #FIX: offsets were hard-coded as 3+j and 6+j, which only
            #worked for N == 3; use N-based offsets to match the writer.
            v0[i,j] = float(values[j])
            v1[i,j] = float(values[N+j])
            v2[i,j] = float(values[2*N+j])

    fid.close()
2599
def cpt_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z,z,z coordinates of triangles constituting the bed
    elevation.
    FIXME: Not in use pt

    NOTE(review): the debug 'print X0' and 'sys.exit()' below make the
    remainder of this function unreachable as written.
    """

    M, N = v0.shape


    print X0
    import sys; sys.exit()
    FN = create_filename(filename, 'cpt', M)
    print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        #Vertex 0: x, y followed by its N elevation values
        for j in range(2):
            fid.write('%.16e ' %X0[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v0[i,j])       #z,z,z,

        #Vertex 1
        for j in range(2):
            fid.write('%.16e ' %X1[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v1[i,j])

        #Vertex 2
        for j in range(2):
            fid.write('%.16e ' %X2[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v2[i,j])

        fid.write('\n')
    fid.close()
2633
2634
2635
2636#Function for storing out to e.g. visualisation
2637#FIXME: Do we want this?
2638#FIXME: Not done yet for this version
def dat_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Write x,y,z coordinates of the triangles constituting the bed
    elevation to a .dat file, one triangle (three vertices) per line.
    """

    ntriangles, _ = v0.shape

    outname = create_filename(filename, 'dat', ntriangles)
    #print 'Writing to %s' %outname

    fid = open(outname, 'w')
    for i in range(ntriangles):
        #For each vertex: x, y then the bed elevation z
        for X, v in ((X0, v0), (X1, v1), (X2, v2)):
            fid.write('%f ' %X[i,0])   #x
            fid.write('%f ' %X[i,1])   #y
            fid.write('%f ' %v[i,0])   #z

        fid.write('\n')
    fid.close()
2664
2665
2666
def dat_variable_writer(filename, t, v0, v1, v2):
    """Write the first component of v0, v1 and v2 for time t to a .dat
    file, one line per triangle (historically the water height).
    """

    ntriangles, _ = v0.shape

    outname = create_filename(filename, 'dat', ntriangles, t)
    #print 'Writing to %s' %outname

    fid = open(outname, 'w')
    for i in range(ntriangles):
        fid.write('%.4f %.4f %.4f \n' %(v0[i,0], v1[i,0], v2[i,0]))
    fid.close()
2684
2685
def read_sww(filename):
    """Read sww Net CDF file containing Shallow Water Wave simulation

    The integer array volumes is of shape Nx3 where N is the number of
    triangles in the mesh.

    Each entry in volumes is an index into the x,y arrays (the location).

    Quantities stage, elevation, xmomentum and ymomentum are all in arrays of dimensions
    number_of_timesteps, number_of_points.

    The momentum is not always stored.

    NOTE(review): as written this function reads the variables but
    returns nothing and never closes the file - it appears incomplete
    or obsolete.
    """
    from Scientific.IO.NetCDF import NetCDFFile
    print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
#latitude, longitude
    # Get the variables as Numeric arrays
    x = fid.variables['x']             #x-coordinates of vertices
    y = fid.variables['y']             #y-coordinates of vertices
    z = fid.variables['elevation']     #Elevation
    time = fid.variables['time']       #Timesteps
    stage = fid.variables['stage']     #Water level
    #xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    #ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    volumes = fid.variables['volumes'] #Connectivity
2714
2715
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.

    basename_in -- root name of the input .dem file
    stencil -- 2D weight array applied to each window of the input grid
    cellsize_new -- cell size of the decimated grid; assumed to be an
                    integer multiple of the input cellsize
    basename_out -- optional output root name; defaults to
                    basename_in + '_' + repr(cellsize_new)
    verbose -- progress printing
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata (each stored as a length-one array, hence the [0])
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out == None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    #Offsets centre the stencil window on the new grid origin
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                           'of spatial point data'
    #Georeferencing (copied from the input DEM, shifted by the stencil offset)
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    #View the flat input elevation data as a 2D grid
    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    #Process one output row at a time, writing each completed row
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev =  zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            #Window of the input grid covered by the stencil
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
2842
2843
2844   
2845def sww2asc_obsolete(basename_in, basename_out = None,
2846            quantity = None,
2847            timestep = None,
2848            reduction = None,
2849            cellsize = 10,
2850            verbose = False,
2851            origin = None):
2852    """Read SWW file and convert to Digitial Elevation model format (.asc)
2853
2854    Example:
2855
2856    ncols         3121
2857    nrows         1800
2858    xllcorner     722000
2859    yllcorner     5893000
2860    cellsize      25
2861    NODATA_value  -9999
2862    138.3698 137.4194 136.5062 135.5558 ..........
2863
2864    Also write accompanying file with same basename_in but extension .prj
2865    used to fix the UTM zone, datum, false northings and eastings.
2866
2867    The prj format is assumed to be as
2868
2869    Projection    UTM
2870    Zone          56
2871    Datum         WGS84
2872    Zunits        NO
2873    Units         METERS
2874    Spheroid      WGS84
2875    Xshift        0.0000000000
2876    Yshift        10000000.0000000000
2877    Parameters
2878
2879
2880    if quantity is given, out values from quantity otherwise default to
2881    elevation
2882
2883    if timestep (an index) is given, output quantity at that timestep
2884
2885    if reduction is given use that to reduce quantity over all timesteps.
2886
2887    """
2888    from Numeric import array, Float, concatenate, NewAxis, zeros,\
2889         sometrue
2890    from utilities.polygon import inside_polygon
2891
2892    #FIXME: Should be variable
2893    datum = 'WGS84'
2894    false_easting = 500000
2895    false_northing = 10000000
2896
2897    if quantity is None:
2898        quantity = 'elevation'
2899
2900    if reduction is None:
2901        reduction = max
2902
2903    if basename_out is None:
2904        basename_out = basename_in + '_%s' %quantity
2905
2906    swwfile = basename_in + '.sww'
2907    ascfile = basename_out + '.asc'
2908    prjfile = basename_out + '.prj'
2909
2910
2911    if verbose: print 'Reading from %s' %swwfile
2912    #Read sww file
2913    from Scientific.IO.NetCDF import NetCDFFile
2914    fid = NetCDFFile(swwfile)
2915
2916    #Get extent and reference
2917    x = fid.variables['x'][:]
2918    y = fid.variables['y'][:]
2919    volumes = fid.variables['volumes'][:]
2920
2921    ymin = min(y); ymax = max(y)
2922    xmin = min(x); xmax = max(x)
2923
2924    number_of_timesteps = fid.dimensions['number_of_timesteps']
2925    number_of_points = fid.dimensions['number_of_points']
2926    if origin is None:
2927
2928        #Get geo_reference
2929        #sww files don't have to have a geo_ref
2930        try:
2931            geo_reference = Geo_reference(NetCDFObject=fid)
2932        except AttributeError, e:
2933            geo_reference = Geo_reference() #Default georef object
2934
2935        xllcorner = geo_reference.get_xllcorner()
2936        yllcorner = geo_reference.get_yllcorner()
2937        zone = geo_reference.get_zone()
2938    else:
2939        zone = origin[0]
2940        xllcorner = origin[1]
2941        yllcorner = origin[2]
2942
2943
2944    #Get quantity and reduce if applicable
2945    if verbose: print 'Reading quantity %s' %quantity
2946
2947    if quantity.lower() == 'depth':
2948        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
2949    else:
2950        q = fid.variables[quantity][:]
2951
2952
2953    if len(q.shape) == 2:
2954        if verbose: print 'Reducing quantity %s' %quantity
2955        q_reduced = zeros( number_of_points, Float )
2956
2957        for k in range(number_of_points):
2958            q_reduced[k] = reduction( q[:,k] )
2959
2960        q = q_reduced
2961
2962    #Now q has dimension: number_of_points
2963
2964    #Create grid and update xll/yll corner and x,y
2965    if verbose: print 'Creating grid'
2966    ncols = int((xmax-xmin)/cellsize)+1
2967    nrows = int((ymax-ymin)/cellsize)+1
2968
2969    newxllcorner = xmin+xllcorner
2970    newyllcorner = ymin+yllcorner
2971
2972    x = x+xllcorner-newxllcorner
2973    y = y+yllcorner-newyllcorner
2974
2975    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2976    assert len(vertex_points.shape) == 2
2977
2978
2979    from Numeric import zeros, Float
2980    grid_points = zeros ( (ncols*nrows, 2), Float )
2981
2982
2983    for i in xrange(nrows):
2984        yg = i*cellsize
2985        for j in xrange(ncols):
2986            xg = j*cellsize
2987            k = i*ncols + j
2988
2989            grid_points[k,0] = xg
2990            grid_points[k,1] = yg
2991
2992    #Interpolate
2993    from least_squares import Interpolation
2994   
2995
2996    #FIXME: This should be done with precrop = True, otherwise it'll
2997    #take forever. With expand_search  set to False, some grid points might
2998    #miss out....
2999
3000    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
3001                           precrop = False, expand_search = False,
3002                           verbose = verbose)
3003
3004    #Interpolate using quantity values
3005    if verbose: print 'Interpolating'
3006    grid_values = interp.interpolate(q).flat
3007
3008    #Write
3009    #Write prj file
3010    if verbose: print 'Writing %s' %prjfile
3011    prjid = open(prjfile, 'w')
3012    prjid.write('Projection    %s\n' %'UTM')
3013    prjid.write('Zone          %d\n' %zone)
3014    prjid.write('Datum         %s\n' %datum)
3015    prjid.write('Zunits        NO\n')
3016    prjid.write('Units         METERS\n')
3017    prjid.write('Spheroid      %s\n' %datum)
3018    prjid.write('Xshift        %d\n' %false_easting)
3019    prjid.write('Yshift        %d\n' %false_northing)
3020    prjid.write('Parameters\n')
3021    prjid.close()
3022
3023
3024   
3025    if verbose: print 'Writing %s' %ascfile
3026    NODATA_value = -9999
3027 
3028    ascid = open(ascfile, 'w')
3029
3030    ascid.write('ncols         %d\n' %ncols)
3031    ascid.write('nrows         %d\n' %nrows)
3032    ascid.write('xllcorner     %d\n' %newxllcorner)
3033    ascid.write('yllcorner     %d\n' %newyllcorner)
3034    ascid.write('cellsize      %f\n' %cellsize)
3035    ascid.write('NODATA_value  %d\n' %NODATA_value)
3036
3037
3038    #Get bounding polygon from mesh
3039    P = interp.mesh.get_boundary_polygon()
3040    inside_indices = inside_polygon(grid_points, P)
3041
3042    for i in range(nrows):
3043        if verbose and i%((nrows+10)/10)==0:
3044            print 'Doing row %d of %d' %(i, nrows)
3045
3046        for j in range(ncols):
3047            index = (nrows-i-1)*ncols+j
3048
3049            if sometrue(inside_indices == index):
3050                ascid.write('%f ' %grid_values[index])
3051            else:
3052                ascid.write('%d ' %NODATA_value)
3053
3054        ascid.write('\n')
3055
3056    #Close
3057    ascid.close()
3058    fid.close()
3059
3060def sww2ers_obsolete(basename_in, basename_out = None,
3061            quantity = None,
3062            timestep = None,
3063            reduction = None,
3064            cellsize = 10,
3065            verbose = False,
3066            origin = None,
3067            datum = 'WGS84'):
3068   
3069    """Read SWW file and convert to Digitial Elevation model format (.asc)
3070
3071    Example:
3072
3073    ncols         3121
3074    nrows         1800
3075    xllcorner     722000
3076    yllcorner     5893000
3077    cellsize      25
3078    NODATA_value  -9999
3079    138.3698 137.4194 136.5062 135.5558 ..........
3080
3081    Also write accompanying file with same basename_in but extension .prj
3082    used to fix the UTM zone, datum, false northings and eastings.
3083
3084    The prj format is assumed to be as
3085
3086    Projection    UTM
3087    Zone          56
3088    Datum         WGS84
3089    Zunits        NO
3090    Units         METERS
3091    Spheroid      WGS84
3092    Xshift        0.0000000000
3093    Yshift        10000000.0000000000
3094    Parameters
3095
3096
3097    if quantity is given, out values from quantity otherwise default to
3098    elevation
3099
3100    if timestep (an index) is given, output quantity at that timestep
3101
3102    if reduction is given use that to reduce quantity over all timesteps.
3103
3104    """
3105    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
3106    import ermapper_grids
3107    from utilities.polygon import inside_polygon   
3108
3109    header = {}
3110    false_easting = 500000
3111    false_northing = 10000000
3112    NODATA_value = 0
3113
3114    if quantity is None:
3115        quantity = 'elevation'
3116
3117    if reduction is None:
3118        reduction = max
3119
3120    if basename_out is None:
3121        basename_out = basename_in + '_%s' %quantity
3122 
3123    swwfile = basename_in + '.sww'
3124    # Note the use of a .ers extension is optional (write_ermapper_grid will
3125    # deal with either option
3126    ersfile = basename_out
3127##    prjfile = basename_out + '.prj'
3128
3129
3130    if verbose: print 'Reading from %s' %swwfile
3131    #Read sww file
3132    from Scientific.IO.NetCDF import NetCDFFile
3133    fid = NetCDFFile(swwfile)
3134
3135    #Get extent and reference
3136    x = fid.variables['x'][:]
3137    y = fid.variables['y'][:]
3138    volumes = fid.variables['volumes'][:]
3139
3140    ymin = min(y); ymax = max(y)
3141    xmin = min(x); xmax = max(x)
3142
3143    number_of_timesteps = fid.dimensions['number_of_timesteps']
3144    number_of_points = fid.dimensions['number_of_points']
3145    if origin is None:
3146
3147        #Get geo_reference
3148        #sww files don't have to have a geo_ref
3149        try:
3150            geo_reference = Geo_reference(NetCDFObject=fid)
3151        except AttributeError, e:
3152            geo_reference = Geo_reference() #Default georef object
3153
3154        xllcorner = geo_reference.get_xllcorner()
3155        yllcorner = geo_reference.get_yllcorner()
3156        zone = geo_reference.get_zone()
3157    else:
3158        zone = origin[0]
3159        xllcorner = origin[1]
3160        yllcorner = origin[2]
3161
3162
3163    #Get quantity and reduce if applicable
3164    if verbose: print 'Reading quantity %s' %quantity
3165
3166    if quantity.lower() == 'depth':
3167        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
3168    else:
3169        q = fid.variables[quantity][:]
3170
3171
3172    if len(q.shape) == 2:
3173        if verbose: print 'Reducing quantity %s' %quantity
3174        q_reduced = zeros( number_of_points, Float )
3175
3176        for k in range(number_of_points):
3177            q_reduced[k] = reduction( q[:,k] )
3178
3179        q = q_reduced
3180
3181    #Now q has dimension: number_of_points
3182
3183    #Create grid and update xll/yll corner and x,y
3184    if verbose: print 'Creating grid'
3185    ncols = int((xmax-xmin)/cellsize)+1
3186    nrows = int((ymax-ymin)/cellsize)+1
3187
3188    newxllcorner = xmin+xllcorner
3189    newyllcorner = ymin+yllcorner
3190
3191    x = x+xllcorner-newxllcorner
3192    y = y+yllcorner-newyllcorner
3193
3194    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
3195    assert len(vertex_points.shape) == 2
3196
3197
3198    from Numeric import zeros, Float
3199    grid_points = zeros ( (ncols*nrows, 2), Float )
3200
3201
3202    for i in xrange(nrows):
3203        yg = (nrows-i)*cellsize # this will flip the order of the y values
3204        for j in xrange(ncols):
3205            xg = j*cellsize
3206            k = i*ncols + j
3207
3208            grid_points[k,0] = xg
3209            grid_points[k,1] = yg
3210
3211    #Interpolate
3212    from least_squares import Interpolation
3213   
3214
3215    #FIXME: This should be done with precrop = True (?), otherwise it'll
3216    #take forever. With expand_search  set to False, some grid points might
3217    #miss out....
3218
3219    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
3220                           precrop = False, expand_search = False,
3221                           verbose = verbose)
3222
3223    #Interpolate using quantity values
3224    if verbose: print 'Interpolating'
3225    grid_values = interp.interpolate(q).flat
3226    grid_values = reshape(grid_values,(nrows, ncols))
3227
3228
3229    # setup header information
3230    header['datum'] = '"' + datum + '"'
3231    # FIXME The use of hardwired UTM and zone number needs to be made optional
3232    # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
3233    header['projection'] = '"UTM-' + str(zone) + '"'
3234    header['coordinatetype'] = 'EN'
3235    if header['coordinatetype'] == 'LL':
3236        header['longitude'] = str(newxllcorner)
3237        header['latitude'] = str(newyllcorner)
3238    elif header['coordinatetype'] == 'EN':
3239        header['eastings'] = str(newxllcorner)
3240        header['northings'] = str(newyllcorner)
3241    header['nullcellvalue'] = str(NODATA_value)
3242    header['xdimension'] = str(cellsize)
3243    header['ydimension'] = str(cellsize)
3244    header['value'] = '"' + quantity + '"'
3245
3246
3247    #Write
3248    if verbose: print 'Writing %s' %ersfile
3249    ermapper_grids.write_ermapper_grid(ersfile,grid_values, header)
3250
3251    fid.close()   
3252
3253##    ascid = open(ascfile, 'w')
3254##
3255##    ascid.write('ncols         %d\n' %ncols)
3256##    ascid.write('nrows         %d\n' %nrows)
3257##    ascid.write('xllcorner     %d\n' %newxllcorner)
3258##    ascid.write('yllcorner     %d\n' %newyllcorner)
3259##    ascid.write('cellsize      %f\n' %cellsize)
3260##    ascid.write('NODATA_value  %d\n' %NODATA_value)
3261##
3262##
3263##    #Get bounding polygon from mesh
3264##    P = interp.mesh.get_boundary_polygon()
3265##    inside_indices = inside_polygon(grid_points, P)
3266##
3267##    for i in range(nrows):
3268##        if verbose and i%((nrows+10)/10)==0:
3269##            print 'Doing row %d of %d' %(i, nrows)
3270##
3271##        for j in range(ncols):
3272##            index = (nrows-i-1)*ncols+j
3273##
3274##            if sometrue(inside_indices == index):
3275##                ascid.write('%f ' %grid_values[index])
3276##            else:
3277##                ascid.write('%d ' %NODATA_value)
3278##
3279##        ascid.write('\n')
3280##
3281##    #Close
3282##    ascid.close()
3283##    fid.close()
3284
Note: See TracBrowser for help on using the repository browser.