source: inundation/pyvolution/data_manager.py @ 2003

Last change on this file since 2003 was 2003, checked in by duncan, 18 years ago

working on gippsland to sww

File size: 94.8 KB
Line 
1"""datamanager.py - input output for AnuGA
2
3
This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
NC -> SWW: Conversion of MOST boundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
TSH + Boundary SWW -> SWW: Simulation using pyvolution
48
49"""
50import os
51
52from Numeric import concatenate, array, Float, resize
53
54from coordinate_transforms.geo_reference import Geo_reference
55
def make_filename(s):
    """Return a filesystem-friendly version of the argument string.

    Leading/trailing whitespace is stripped, interior spaces become
    underscores, parentheses are removed and doubled underscores are
    collapsed to a single one.
    """

    cleaned = s.strip().replace(' ', '_')
    for bracket in ('(', ')'):
        cleaned = cleaned.replace(bracket, '')

    return cleaned.replace('__', '_')
67
68
def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    # Guard against an empty path before inspecting the last character
    # (the original indexed path[-1] unconditionally and crashed on '')
    if path and path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname

    # NOTE: access flags must be combined with bitwise OR.  The original
    # 'os.R_OK and os.W_OK' evaluated to just os.W_OK, so readability
    # was never actually checked.
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            os.mkdir(path)

            # Change access rights if possible
            if unix:
                os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose:
                print('MESSAGE: Directory %s created.' % path)

        except OSError:
            # Creation failed (permissions, race, bad name):
            # fall back to a well-known writable location
            print('WARNING: Directory %s could not be created.' % path)
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print('Using directory %s instead' % path)

    return path
121
122
123
def del_dir(path):
    """Recursively delete directory path and all its contents.

    Symbolic links to directories are unlinked like ordinary files
    rather than followed, so the traversal cannot escape path.
    Files that cannot be removed are reported but do not abort the
    traversal.  A non-existent path is a silent no-op.
    """

    import os

    if os.path.isdir(path):
        for entry in os.listdir(path):  # renamed from 'file' (shadowed builtin)
            X = os.path.join(path, entry)

            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except OSError:
                    # Narrowed from a bare except: only filesystem
                    # errors are expected and tolerated here.
                    print('Could not remove file %s' % X)

        os.rmdir(path)
144
145
146
def create_filename(datadir, filename, format, size=None, time=None):
    """Build a data file name of the form
    <datadir>/<filename>[_size<size>][_time<time>].<format>

    The data directory is verified (and created if necessary) via
    check_dir, which returns it with a trailing separator.
    """

    import os
    #from config import data_dir

    parts = [check_dir(datadir), filename]

    if size is not None:
        parts.append('_size%d' % size)

    if time is not None:
        parts.append('_time%.2f' % time)

    parts.append('.' + format)
    return ''.join(parts)
162
163
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)

    # check_dir already returns the directory WITH a trailing
    # separator, and create_filename writes '_size%d' (no '='),
    # so the pattern must mirror that exactly or the glob will
    # never match anything it wrote.
    pattern = dir + filename + '_size%d*.%s' % (size, format)
    return glob.glob(pattern)
177
178
179
180#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        """Prepare output for the given domain.

        mode is one of 'r' (read), 'w' (write) or 'a' (append).
        The file name is derived from the domain's data directory,
        the domain name and the supplied extension.
        """
        msg = ('''Mode %s must be either:'''
               '''   'w' (write)'''
               '''   'r' (read)'''
               '''   'a' (append)''') % mode
        assert mode in ('r', 'w', 'a'), msg

        # Derive filename from the domain
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain


        #FIXME: Should we have a general set_precision function?
207
208
209
210#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        # Parameters:
        #   domain    -- pyvolution domain supplying mesh and quantities
        #   mode      -- 'w' (create), 'r' (read) or 'a' (append)
        #   max_size  -- maximum file size in bytes before store_timestep
        #                starts a new file (default ~2GB)
        #   recursion -- split-file depth; False/0 for the first file
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            # With smoothing, vertices shared between triangles are
            # stored once; otherwise each triangle stores its own three.
            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility ('z' duplicates 'elevation')
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility (duplicate elevation into 'z')
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file

        names may be a single quantity name or a list/tuple of names
        ('stage', 'xmomentum', 'ymomentum').  If appending would push
        the file past max_size, a new sww file is started instead
        (see below) and this timestep is written there.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        minimum_allowed_depth = 0.001
        #minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        # Retry the open: another process may be reading the file.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise msg



        #Check to see if the file is already too big:
        # Estimate next size as current size plus the average bytes
        # written per timestep so far.
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase =  file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domian.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break swollens reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            # Incrementing recursion raises the size threshold so the
            # new file is not split again immediately.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    # Clip stage to the bed where depth is below the
                    # minimum allowed depth (dry cells)
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()
480
481
482
483#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt) for checkpoint data
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        # Use the .cpt extension: the original passed 'sww' here,
        # which made checkpoints clobber the model output file.
        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write the mesh (x,y coordinates and connectivity) to file.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        # NOTE: the checkpoint file defines no elevation variable, so Z
        # is not stored here.  (The original wrote to an undefined name
        # 'z', which raised NameError.)

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                          %self.filename
                msg += ' - trying again'
                print(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        # Moved outside the retry loop and raised as a real exception:
        # the original mis-indented this so 'raise msg' executed
        # unconditionally on the first loop iteration.
        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise IOError(msg)


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        # Write into the variable matching the requested quantity
        # (the original hard-coded 'stage' regardless of name).
        target = fid.variables[name]
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision = self.precision)

        target[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()
628
629
630
631
632
633#Function for storing xya output
634#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface for storing model state as ASCII .xya point data.

    FIXME: Not done yet for this version (store_timestep is a stub).
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        # Number of output points depends on vertex smoothing
        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        # Header line: <#points> <#attributes> followed by a comment
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        # NOTE: map(None, ...) is the Python 2 idiom for padded zip
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file

        FIXME: Not implemented for the xya format.
        """
        pass
704
705
706#Auxiliary
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number elements.
       triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    # Ensure the output name carries the .obj extension exactly once
    root, ext = os.path.splitext(filename)
    FN = filename if ext == '.obj' else filename + '.obj'

    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N == 3  #Assuming three vertices per element

    # One 'v' record per vertex of every (disconnected) triangle
    for row in range(M):
        for col in range(N):
            outfile.write("v %f %f %f\n" % (x[row, col], y[row, col], z[row, col]))

    # One 'f' record per triangle; obj vertex indices are 1-based
    for row in range(M):
        first = row*N + 1
        outfile.write("f %d %d %d\n" % (first, first + 1, first + 2))

    outfile.close()
744
745
746#########################################################
747#Conversion routines
748########################################################
749
def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj

    basefilename -- sww file name without the '_size<size>.sww' suffix
    size -- number of volumes (M); also used to build the file names

    Writes one obj file for the bathymetry and one per timestep for
    the stage.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    # Points are stored flat with stride M: vertex j of volume i is
    # at index i + j*M
    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        # NOTE: only the first 5 characters of basefilename are used
        # here - presumably to keep names short; verify against callers
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)
802
803
def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?

    Reads <basefilename>_geometry.dat for x,y,z triangle data and all
    matching <basefilename>*_time=<t>.dat files for per-timestep
    values, writing one obj file per input.
    """

    import glob, os
    from config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    ##i = 0
    # Geometry rows hold three (x,y,z) triples, one per vertex
    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]

        ##i += 1


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info
    #and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(data_dir+os.sep+filename,'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise 'Hmmmm'



        ##i = 0
        # Data rows hold one value per vertex; x,y reused from geometry
        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]

            ##i += 1

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
881
882
def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    nettcdf file filename2

    All dimensions and variables are copied; only 'time' and 'stage'
    are thinned to the selected timesteps.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    # Declare all variables with matching type and dimensions
    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass  # time-dependent; handled below
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
930
931
932#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename

    domain.format selects one of the Data_format_* classes defined in
    this module (e.g. 'sww', 'cpt', 'xya').
    """

    # Look the class up in the module namespace instead of eval'ing
    # attribute-derived text; unknown formats raise KeyError.
    cls = globals()['Data_format_%s' % domain.format]
    return cls(domain, mode)
939
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Open existing xya file for read
    infile = open(root + ext, 'r')

    if verbose: print('Reading xya points from %s' %(root + ext))

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        if len(fields) != 3:
            # Warn but still attempt to parse below.  (The original
            # used assert inside a bare try/except for this check;
            # asserts vanish under python -O.)
            print('WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line))

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except (IndexError, ValueError):
            #Skip lines that do not contain three numeric values
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)

    infile.close()  # was leaked in the original


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print('Store to NetCDF file %s' %ptsname)
    write_ptsfile(ptsname, points, attribute, attribute_name)
1015
1016
def write_ptsfile(ptsname, points, attribute, attribute_name = None):
    """Store point coordinates and one associated attribute in the
    NetCDF pts format.

    ptsname: name of output file (including the '.pts' extension)
    points: sequence of (x, y) coordinate pairs
    attribute: sequence of attribute values, one per point
    attribute_name: name under which the attribute variable is stored.
                    Defaults to 'attribute'.

    A default (zero-offset) georeference is written to the file.
    """

    from Numeric import Float
    from Scientific.IO.NetCDF import NetCDFFile
    from coordinate_transforms.geo_reference import Geo_reference

    if attribute_name is None:
        attribute_name = 'attribute'

    #Open new NetCDF file and describe its contents
    outfile = NetCDFFile(ptsname, 'w')
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'

    #Default georeferencing (no offsets)
    Geo_reference().write_NetCDF(outfile)

    #Dimensions: one entry per point, two coordinates per point
    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    #Variables holding the actual data
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    point_var = outfile.variables['points']
    attribute_var = outfile.variables[attribute_name]

    #Copy data into the file
    point_var[:, :] = points
    attribute_var[:] = attribute

    outfile.close()
1061   
1062
def dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    basename_in: name of input file without the '.dem' extension
    basename_out: name of output file without the '.pts' extension.
                  If None, basename_in + '.pts' is used.
    easting_min/easting_max/northing_min/northing_max: optional absolute
        UTM bounds restricting the area converted. If omitted, the full
        DEM extent is used. Output point coordinates are stored relative
        to (easting_min, northing_min), which becomes the new georeference.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    #Header attributes stored as length-1 arrays; [0] extracts the scalar
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values (full DEM extent) for any missing bounds
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    #NOTE(review): these offsets are computed but never used below — verify
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing: the bounding box corner becomes the new origin
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows


    # dimension definitions
    #Number of points is fixed up front from the bounding box size;
    #the loop below must produce exactly this many points (see final assert)
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
    outfile.createDimension('number_of_points', nrows_in_bounding_box*ncols_in_bounding_box)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    #View flat elevation data as a (nrows, ncols) grid
    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    #FIXME: Could perhaps be faster using array operations (Fixed 27/7/05)
    global_index = 0
    for i in range(nrows):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index
        #Row buffers, written to file in one slice per row
        tpoints = zeros((ncols_in_bounding_box, 2), Float)
        telev =  zeros(ncols_in_bounding_box, Float)
        local_index = 0

        #Row 0 is the northernmost row
        #NOTE(review): uses (nrows-i), not (nrows-1-i) — top row sits one
        #full cellsize above yllcorner + (nrows-1)*cellsize; confirm intended
        y = (nrows-i)*cellsize + yllcorner
        for j in range(ncols):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                #Store coordinates relative to the bounding box corner
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        #Only rows fully inside the bounding box are written; a partially
        #covered row is skipped, which will trip the assert below
        if upper_index == lower_index + ncols_in_bounding_box:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nrows_in_bounding_box*ncols_in_bounding_box, 'index not equal to number of points'

    infile.close()
    outfile.close()
1206
1207
1208
1209def sww2dem(basename_in, basename_out = None,
1210            quantity = None,
1211            timestep = None,
1212            reduction = None,
1213            cellsize = 10,
1214            easting_min = None,
1215            easting_max = None,
1216            northing_min = None,
1217            northing_max = None,         
1218            verbose = False,
1219            origin = None,
1220            datum = 'WGS84',
1221            format = 'ers'):
1222   
1223    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1224
1225    Example (ASC):
1226
1227    ncols         3121
1228    nrows         1800
1229    xllcorner     722000
1230    yllcorner     5893000
1231    cellsize      25
1232    NODATA_value  -9999
1233    138.3698 137.4194 136.5062 135.5558 ..........
1234
1235    Also write accompanying file with same basename_in but extension .prj
1236    used to fix the UTM zone, datum, false northings and eastings.
1237
1238    The prj format is assumed to be as
1239
1240    Projection    UTM
1241    Zone          56
1242    Datum         WGS84
1243    Zunits        NO
1244    Units         METERS
1245    Spheroid      WGS84
1246    Xshift        0.0000000000
1247    Yshift        10000000.0000000000
1248    Parameters
1249
1250
1251    The parameter quantity must be the name of an existing quantity or
1252    an expression involving existing quantities. The default is
1253    'elevation'.
1254
1255    if timestep (an index) is given, output quantity at that timestep
1256
1257    if reduction is given use that to reduce quantity over all timesteps.
1258
1259    datum
1260   
1261    format can be either 'asc' or 'ers'
1262    """
1263
1264    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1265    from utilities.polygon import inside_polygon
1266    from util import apply_expression_to_dictionary
1267   
1268    msg = 'Format must be either asc or ers'
1269    assert format.lower() in ['asc', 'ers'], msg
1270   
1271    false_easting = 500000
1272    false_northing = 10000000
1273
1274    if quantity is None:
1275        quantity = 'elevation'
1276
1277    if reduction is None:
1278        reduction = max
1279
1280    if basename_out is None:
1281        basename_out = basename_in + '_%s' %quantity
1282 
1283    swwfile = basename_in + '.sww'
1284    demfile = basename_out + '.' + format
1285    # Note the use of a .ers extension is optional (write_ermapper_grid will
1286    # deal with either option
1287
1288    #Read sww file
1289    if verbose: print 'Reading from %s' %swwfile
1290    from Scientific.IO.NetCDF import NetCDFFile
1291    fid = NetCDFFile(swwfile)
1292
1293    #Get extent and reference
1294    x = fid.variables['x'][:]
1295    y = fid.variables['y'][:]
1296    volumes = fid.variables['volumes'][:]
1297
1298    number_of_timesteps = fid.dimensions['number_of_timesteps']
1299    number_of_points = fid.dimensions['number_of_points']
1300    if origin is None:
1301
1302        #Get geo_reference
1303        #sww files don't have to have a geo_ref
1304        try:
1305            geo_reference = Geo_reference(NetCDFObject=fid)
1306        except AttributeError, e:
1307            geo_reference = Geo_reference() #Default georef object
1308
1309        xllcorner = geo_reference.get_xllcorner()
1310        yllcorner = geo_reference.get_yllcorner()
1311        zone = geo_reference.get_zone()
1312    else:
1313        zone = origin[0]
1314        xllcorner = origin[1]
1315        yllcorner = origin[2]
1316
1317
1318
1319    #Get quantity and reduce if applicable
1320    if verbose: print 'Processing quantity %s' %quantity
1321
1322    #Turn NetCDF objects into Numeric arrays
1323    quantity_dict = {}
1324    for name in fid.variables.keys():
1325        quantity_dict[name] = fid.variables[name][:]
1326
1327
1328    q = apply_expression_to_dictionary(quantity, quantity_dict)
1329
1330
1331    if len(q.shape) == 2:
1332        #q has a time component and needs to be reduced along
1333        #the temporal dimension
1334        if verbose: print 'Reducing quantity %s' %quantity
1335        q_reduced = zeros( number_of_points, Float )
1336
1337        for k in range(number_of_points):
1338            q_reduced[k] = reduction( q[:,k] )
1339
1340        q = q_reduced
1341
1342    #Post condition: Now q has dimension: number_of_points
1343    assert len(q.shape) == 1
1344    assert q.shape[0] == number_of_points   
1345   
1346
1347    #Create grid and update xll/yll corner and x,y
1348
1349    #Relative extent
1350    if easting_min is None:
1351        xmin = min(x)
1352    else:
1353        xmin = easting_min - xllcorner
1354
1355    if easting_max is None:
1356        xmax = max(x)
1357    else:
1358        xmax = easting_max - xllcorner
1359       
1360    if northing_min is None:       
1361        ymin = min(y)
1362    else:
1363        ymin = northing_min - yllcorner
1364       
1365    if northing_max is None:       
1366        ymax = max(y)
1367    else:
1368        ymax = northing_max - yllcorner
1369       
1370       
1371   
1372    if verbose: print 'Creating grid'
1373    ncols = int((xmax-xmin)/cellsize)+1
1374    nrows = int((ymax-ymin)/cellsize)+1
1375
1376
1377    #New absolute reference and coordinates
1378    newxllcorner = xmin+xllcorner
1379    newyllcorner = ymin+yllcorner
1380
1381    x = x+xllcorner-newxllcorner
1382    y = y+yllcorner-newyllcorner
1383
1384    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1385    assert len(vertex_points.shape) == 2
1386
1387
1388    from Numeric import zeros, Float
1389    grid_points = zeros ( (ncols*nrows, 2), Float )
1390
1391
1392    for i in xrange(nrows):
1393        if format.lower() == 'asc':
1394            yg = i*cellsize
1395        else:   
1396            #this will flip the order of the y values for ers
1397            yg = (nrows-i)*cellsize
1398           
1399        for j in xrange(ncols):
1400            xg = j*cellsize
1401            k = i*ncols + j
1402
1403            grid_points[k,0] = xg
1404            grid_points[k,1] = yg
1405
1406    #Interpolate
1407    from least_squares import Interpolation
1408   
1409
1410    #FIXME: This should be done with precrop = True (?), otherwise it'll
1411    #take forever. With expand_search  set to False, some grid points might
1412    #miss out.... This will be addressed though Duncan's refactoring of least_squares
1413
1414    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
1415                           precrop = False, expand_search = True,
1416                           verbose = verbose)
1417
1418    #Interpolate using quantity values
1419    if verbose: print 'Interpolating'
1420    grid_values = interp.interpolate(q).flat
1421
1422    if format.lower() == 'ers':
1423        # setup ERS header information
1424        grid_values = reshape(grid_values,(nrows, ncols))   
1425        NODATA_value = 0
1426        header = {}
1427        header['datum'] = '"' + datum + '"'
1428        # FIXME The use of hardwired UTM and zone number needs to be made optional
1429        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
1430        header['projection'] = '"UTM-' + str(zone) + '"'
1431        header['coordinatetype'] = 'EN'
1432        if header['coordinatetype'] == 'LL':
1433            header['longitude'] = str(newxllcorner)
1434            header['latitude'] = str(newyllcorner)
1435        elif header['coordinatetype'] == 'EN':
1436            header['eastings'] = str(newxllcorner)
1437            header['northings'] = str(newyllcorner)
1438        header['nullcellvalue'] = str(NODATA_value)
1439        header['xdimension'] = str(cellsize)
1440        header['ydimension'] = str(cellsize)
1441        header['value'] = '"' + quantity + '"'
1442
1443
1444        #Write
1445        if verbose: print 'Writing %s' %demfile
1446        import ermapper_grids
1447        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
1448
1449        fid.close()   
1450    else:
1451        #Write to Ascii format
1452       
1453        #Write prj file
1454        prjfile = basename_out + '.prj' 
1455       
1456        if verbose: print 'Writing %s' %prjfile
1457        prjid = open(prjfile, 'w')
1458        prjid.write('Projection    %s\n' %'UTM')
1459        prjid.write('Zone          %d\n' %zone)
1460        prjid.write('Datum         %s\n' %datum)
1461        prjid.write('Zunits        NO\n')
1462        prjid.write('Units         METERS\n')
1463        prjid.write('Spheroid      %s\n' %datum)
1464        prjid.write('Xshift        %d\n' %false_easting)
1465        prjid.write('Yshift        %d\n' %false_northing)
1466        prjid.write('Parameters\n')
1467        prjid.close()
1468
1469
1470   
1471        if verbose: print 'Writing %s' %ascfile
1472        NODATA_value = -9999
1473 
1474        ascid = open(demfile, 'w')
1475
1476        ascid.write('ncols         %d\n' %ncols)
1477        ascid.write('nrows         %d\n' %nrows)
1478        ascid.write('xllcorner     %d\n' %newxllcorner)
1479        ascid.write('yllcorner     %d\n' %newyllcorner)
1480        ascid.write('cellsize      %f\n' %cellsize)
1481        ascid.write('NODATA_value  %d\n' %NODATA_value)
1482
1483
1484        #Get bounding polygon from mesh
1485        P = interp.mesh.get_boundary_polygon()
1486        inside_indices = inside_polygon(grid_points, P)
1487
1488        for i in range(nrows):
1489            if verbose and i%((nrows+10)/10)==0:
1490                print 'Doing row %d of %d' %(i, nrows)
1491
1492            for j in range(ncols):
1493                index = (nrows-i-1)*ncols+j
1494
1495                if sometrue(inside_indices == index):
1496                    ascid.write('%f ' %grid_values[index])
1497                else:
1498                    ascid.write('%d ' %NODATA_value)
1499
1500            ascid.write('\n')
1501
1502        #Close
1503        ascid.close()
1504        fid.close()
1505
1506#Backwards compatibility       
1507def sww2asc(basename_in, basename_out = None,
1508            quantity = None,
1509            timestep = None,
1510            reduction = None,
1511            cellsize = 10,
1512            verbose = False,
1513            origin = None):
1514    print 'sww2asc will soon be obsoleted - please use sww2dem'
1515    sww2dem(basename_in,
1516            basename_out = basename_out,
1517            quantity = quantity,
1518            timestep = timestep,
1519            reduction = reduction,
1520            cellsize = cellsize,
1521            verbose = verbose,
1522            origin = origin,
1523            datum = 'WGS84',
1524            format = 'asc')
1525           
1526def sww2ers(basename_in, basename_out = None,
1527            quantity = None,
1528            timestep = None,
1529            reduction = None,
1530            cellsize = 10,
1531            verbose = False,
1532            origin = None,
1533            datum = 'WGS84'):
1534    print 'sww2ers will soon be obsoleted - please use sww2dem'
1535    sww2dem(basename_in, 
1536            basename_out = basename_out,
1537            quantity = quantity,
1538            timestep = timestep,
1539            reduction = reduction,
1540            cellsize = cellsize,
1541            verbose = verbose,
1542            origin = origin,
1543            datum = datum,
1544            format = 'ers')         
1545################################# END COMPATIBILITY ##############         
1546
1547
1548
1549
def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
                                  verbose=False):
    """Read Digitial Elevation model from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert basename_in + '.asc' to NetCDF format (.dem)
    mimicking the ASCII format closely.


    An accompanying file with same basename_in but extension .prj must exist
    and is used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters

    basename_out: name of output file without the '.dem' extension.
                  If None, basename_in + '.dem' is used.

    The .prj lines are parsed strictly by position (AssertionError is
    raised if a keyword is missing or out of order).
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, array

    #root, ext = os.path.splitext(basename_in)
    root = basename_in

    ###########################################
    # Read Meta data
    #NB: %-formatting binds tighter than +, so this prints
    #'Reading METADATA from <root>' + '.prj' - which is the intended string
    if verbose: print 'Reading METADATA from %s' %root + '.prj'
    metadatafile = open(root + '.prj')
    metalines = metadatafile.readlines()
    metadatafile.close()

    #Each .prj line is '<keyword> <value>'; keywords must appear in the
    #fixed order shown in the docstring
    L = metalines[0].strip().split()
    assert L[0].strip().lower() == 'projection'
    projection = L[1].strip()                   #TEXT

    L = metalines[1].strip().split()
    assert L[0].strip().lower() == 'zone'
    zone = int(L[1].strip())

    L = metalines[2].strip().split()
    assert L[0].strip().lower() == 'datum'
    datum = L[1].strip()                        #TEXT

    L = metalines[3].strip().split()
    assert L[0].strip().lower() == 'zunits'     #IGNORE
    zunits = L[1].strip()                       #TEXT

    L = metalines[4].strip().split()
    assert L[0].strip().lower() == 'units'
    units = L[1].strip()                        #TEXT

    L = metalines[5].strip().split()
    assert L[0].strip().lower() == 'spheroid'   #IGNORE
    spheroid = L[1].strip()                     #TEXT

    L = metalines[6].strip().split()
    assert L[0].strip().lower() == 'xshift'
    false_easting = float(L[1].strip())

    L = metalines[7].strip().split()
    assert L[0].strip().lower() == 'yshift'
    false_northing = float(L[1].strip())

    #print false_easting, false_northing, zone, datum


    ###########################################
    #Read DEM data

    datafile = open(basename_in + '.asc')

    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), ' lines'

    #Six header lines, then nrows lines of elevation values
    ncols = int(lines[0].split()[1].strip())
    nrows = int(lines[1].split()[1].strip())
    xllcorner = float(lines[2].split()[1].strip())
    yllcorner = float(lines[3].split()[1].strip())
    cellsize = float(lines[4].split()[1].strip())
    NODATA_value = int(lines[5].split()[1].strip())

    assert len(lines) == nrows + 6


    ##########################################


    if basename_out == None:
        netcdfname = root + '.dem'
    else:
        netcdfname = basename_out + '.dem'

    if verbose: print 'Store to NetCDF file %s' %netcdfname
    # NetCDF file definition
    fid = NetCDFFile(netcdfname, 'w')

    #Create new file
    fid.institution = 'Geoscience Australia'
    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
                      'of spatial point data'

    #Copy ASCII header values through as global attributes
    fid.ncols = ncols
    fid.nrows = nrows
    fid.xllcorner = xllcorner
    fid.yllcorner = yllcorner
    fid.cellsize = cellsize
    fid.NODATA_value = NODATA_value

    #Georeferencing from the .prj file
    fid.zone = zone
    fid.false_easting = false_easting
    fid.false_northing = false_northing
    fid.projection = projection
    fid.datum = datum
    fid.units = units


    # dimension definitions
    fid.createDimension('number_of_rows', nrows)
    fid.createDimension('number_of_columns', ncols)

    # variable definitions
    fid.createVariable('elevation', Float, ('number_of_rows',
                                            'number_of_columns'))

    # Get handles to the variables
    elevation = fid.variables['elevation']

    #Store data: one elevation row per remaining ASCII line
    n = len(lines[6:])
    for i, line in enumerate(lines[6:]):
        fields = line.split()
        if verbose and i%((n+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        elevation[i, :] = array([float(x) for x in fields])

    fid.close()
1707
1708
1709
1710
1711
1712def ferret2sww(basename_in, basename_out = None,
1713               verbose = False,
1714               minlat = None, maxlat = None,
1715               minlon = None, maxlon = None,
1716               mint = None, maxt = None, mean_stage = 0,
1717               origin = None, zscale = 1,
1718               fail_on_NaN = True,
1719               NaN_filler = 0,
1720               elevation = None,
1721               inverted_bathymetry = False
1722               ): #FIXME: Bathymetry should be obtained
1723                                  #from MOST somehow.
1724                                  #Alternatively from elsewhere
1725                                  #or, as a last resort,
1726                                  #specified here.
1727                                  #The value of -100 will work
1728                                  #for the Wollongong tsunami
1729                                  #scenario but is very hacky
1730    """Convert 'Ferret' NetCDF format for wave propagation to
1731    sww format native to pyvolution.
1732
1733    Specify only basename_in and read files of the form
1734    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
1735    relative height, x-velocity and y-velocity, respectively.
1736
1737    Also convert latitude and longitude to UTM. All coordinates are
1738    assumed to be given in the GDA94 datum.
1739
1740    min's and max's: If omitted - full extend is used.
1741    To include a value min may equal it, while max must exceed it.
1742    Lat and lon are assuemd to be in decimal degrees
1743
1744    origin is a 3-tuple with geo referenced
1745    UTM coordinates (zone, easting, northing)
1746
1747    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
1748    which means that longitude is the fastest
1749    varying dimension (row major order, so to speak)
1750
1751    ferret2sww uses grid points as vertices in a triangular grid
1752    counting vertices from lower left corner upwards, then right
1753    """
1754
1755    import os
1756    from Scientific.IO.NetCDF import NetCDFFile
1757    from Numeric import Float, Int, Int32, searchsorted, zeros, array
1758    from Numeric import allclose, around
1759       
1760    precision = Float
1761
1762
1763    #Get NetCDF data
1764    if verbose: print 'Reading files %s_*.nc' %basename_in
1765    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
1766    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
1767    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
1768    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
1769
1770    if basename_out is None:
1771        swwname = basename_in + '.sww'
1772    else:
1773        swwname = basename_out + '.sww'
1774
1775    times = file_h.variables['TIME']
1776    latitudes = file_h.variables['LAT']
1777    longitudes = file_h.variables['LON']
1778
1779
1780
1781    #Precision used by most for lat/lon is 4 or 5 decimals
1782    e_lat = around(file_e.variables['LAT'][:], 5)
1783    e_lon = around(file_e.variables['LON'][:], 5)
1784
1785    #Check that files are compatible
1786    assert allclose(latitudes, file_u.variables['LAT'])
1787    assert allclose(latitudes, file_v.variables['LAT'])
1788    assert allclose(latitudes, e_lat)
1789
1790    assert allclose(longitudes, file_u.variables['LON'])
1791    assert allclose(longitudes, file_v.variables['LON'])
1792    assert allclose(longitudes, e_lon)   
1793
1794
1795
1796    if mint == None:
1797        jmin = 0
1798    else:
1799        jmin = searchsorted(times, mint)
1800
1801    if maxt == None:
1802        jmax=len(times)
1803    else:
1804        jmax = searchsorted(times, maxt)
1805
1806    if minlat == None:
1807        kmin=0
1808    else:
1809        kmin = searchsorted(latitudes, minlat)
1810
1811    if maxlat == None:
1812        kmax = len(latitudes)
1813    else:
1814        kmax = searchsorted(latitudes, maxlat)
1815
1816    if minlon == None:
1817        lmin=0
1818    else:
1819        lmin = searchsorted(longitudes, minlon)
1820
1821    if maxlon == None:
1822        lmax = len(longitudes)
1823    else:
1824        lmax = searchsorted(longitudes, maxlon)
1825
1826
1827
1828    times = times[jmin:jmax]
1829    latitudes = latitudes[kmin:kmax]
1830    longitudes = longitudes[lmin:lmax]
1831
1832
1833    if verbose: print 'cropping'
1834    zname = 'ELEVATION'
1835
1836   
1837    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
1838    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
1839    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
1840    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
1841
1842#    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
1843#        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
1844#    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
1845#        from Numeric import asarray
1846#        elevations=elevations.tolist()
1847#        elevations.reverse()
1848#        elevations=asarray(elevations)
1849#    else:
1850#        from Numeric import asarray
1851#        elevations=elevations.tolist()
1852#        elevations.reverse()
1853#        elevations=asarray(elevations)
1854#        'print hmmm'
1855
1856
1857
1858    #Get missing values
1859    nan_ha = file_h.variables['HA'].missing_value[0]
1860    nan_ua = file_u.variables['UA'].missing_value[0]
1861    nan_va = file_v.variables['VA'].missing_value[0]
1862    if hasattr(file_e.variables[zname],'missing_value'):
1863        nan_e  = file_e.variables[zname].missing_value[0]
1864    else:
1865        nan_e = None
1866
1867    #Cleanup
1868    from Numeric import sometrue
1869
1870    missing = (amplitudes == nan_ha)
1871    if sometrue (missing):
1872        if fail_on_NaN:
1873            msg = 'NetCDFFile %s contains missing values'\
1874                  %(basename_in+'_ha.nc')
1875            raise msg
1876        else:
1877            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
1878
1879    missing = (uspeed == nan_ua)
1880    if sometrue (missing):
1881        if fail_on_NaN:
1882            msg = 'NetCDFFile %s contains missing values'\
1883                  %(basename_in+'_ua.nc')
1884            raise msg
1885        else:
1886            uspeed = uspeed*(missing==0) + missing*NaN_filler
1887
1888    missing = (vspeed == nan_va)
1889    if sometrue (missing):
1890        if fail_on_NaN:
1891            msg = 'NetCDFFile %s contains missing values'\
1892                  %(basename_in+'_va.nc')
1893            raise msg
1894        else:
1895            vspeed = vspeed*(missing==0) + missing*NaN_filler
1896
1897
1898    missing = (elevations == nan_e)
1899    if sometrue (missing):
1900        if fail_on_NaN:
1901            msg = 'NetCDFFile %s contains missing values'\
1902                  %(basename_in+'_e.nc')
1903            raise msg
1904        else:
1905            elevations = elevations*(missing==0) + missing*NaN_filler
1906
1907    #######
1908
1909
1910
1911    number_of_times = times.shape[0]
1912    number_of_latitudes = latitudes.shape[0]
1913    number_of_longitudes = longitudes.shape[0]
1914
1915    assert amplitudes.shape[0] == number_of_times
1916    assert amplitudes.shape[1] == number_of_latitudes
1917    assert amplitudes.shape[2] == number_of_longitudes
1918
1919    if verbose:
1920        print '------------------------------------------------'
1921        print 'Statistics:'
1922        print '  Extent (lat/lon):'
1923        print '    lat in [%f, %f], len(lat) == %d'\
1924              %(min(latitudes.flat), max(latitudes.flat),
1925                len(latitudes.flat))
1926        print '    lon in [%f, %f], len(lon) == %d'\
1927              %(min(longitudes.flat), max(longitudes.flat),
1928                len(longitudes.flat))
1929        print '    t in [%f, %f], len(t) == %d'\
1930              %(min(times.flat), max(times.flat), len(times.flat))
1931
1932        q = amplitudes.flat
1933        name = 'Amplitudes (ha) [cm]'
1934        print %s in [%f, %f]' %(name, min(q), max(q))
1935
1936        q = uspeed.flat
1937        name = 'Speeds (ua) [cm/s]'
1938        print %s in [%f, %f]' %(name, min(q), max(q))
1939
1940        q = vspeed.flat
1941        name = 'Speeds (va) [cm/s]'
1942        print %s in [%f, %f]' %(name, min(q), max(q))
1943
1944        q = elevations.flat
1945        name = 'Elevations (e) [m]'
1946        print %s in [%f, %f]' %(name, min(q), max(q))
1947
1948
1949    #print number_of_latitudes, number_of_longitudes
1950    number_of_points = number_of_latitudes*number_of_longitudes
1951    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
1952
1953
1954    file_h.close()
1955    file_u.close()
1956    file_v.close()
1957    file_e.close()
1958
1959
1960    # NetCDF file definition
1961    outfile = NetCDFFile(swwname, 'w')
1962
1963    #Create new file
1964    outfile.institution = 'Geoscience Australia'
1965    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
1966                          %(basename_in + '_ha.nc',
1967                            basename_in + '_ua.nc',
1968                            basename_in + '_va.nc',
1969                            basename_in + '_e.nc')
1970
1971
1972    #For sww compatibility
1973    outfile.smoothing = 'Yes'
1974    outfile.order = 1
1975
1976    #Start time in seconds since the epoch (midnight 1/1/1970)
1977    outfile.starttime = starttime = times[0]
1978    times = times - starttime  #Store relative times
1979
1980    # dimension definitions
1981    outfile.createDimension('number_of_volumes', number_of_volumes)
1982
1983    outfile.createDimension('number_of_vertices', 3)
1984    outfile.createDimension('number_of_points', number_of_points)
1985
1986
1987    #outfile.createDimension('number_of_timesteps', len(times))
1988    outfile.createDimension('number_of_timesteps', len(times))
1989
1990    # variable definitions
1991    outfile.createVariable('x', precision, ('number_of_points',))
1992    outfile.createVariable('y', precision, ('number_of_points',))
1993    outfile.createVariable('elevation', precision, ('number_of_points',))
1994
1995    #FIXME: Backwards compatibility
1996    outfile.createVariable('z', precision, ('number_of_points',))
1997    #################################
1998
1999    outfile.createVariable('volumes', Int, ('number_of_volumes',
2000                                            'number_of_vertices'))
2001
2002    outfile.createVariable('time', precision,
2003                           ('number_of_timesteps',))
2004
2005    outfile.createVariable('stage', precision,
2006                           ('number_of_timesteps',
2007                            'number_of_points'))
2008
2009    outfile.createVariable('xmomentum', precision,
2010                           ('number_of_timesteps',
2011                            'number_of_points'))
2012
2013    outfile.createVariable('ymomentum', precision,
2014                           ('number_of_timesteps',
2015                            'number_of_points'))
2016
2017
2018    #Store
2019    from coordinate_transforms.redfearn import redfearn
2020    x = zeros(number_of_points, Float)  #Easting
2021    y = zeros(number_of_points, Float)  #Northing
2022
2023
2024    if verbose: print 'Making triangular grid'
2025    #Check zone boundaries
2026    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2027
2028    vertices = {}
2029    i = 0
2030    for k, lat in enumerate(latitudes):       #Y direction
2031        for l, lon in enumerate(longitudes):  #X direction
2032
2033            vertices[l,k] = i
2034
2035            zone, easting, northing = redfearn(lat,lon)
2036
2037            msg = 'Zone boundary crossed at longitude =', lon
2038            #assert zone == refzone, msg
2039            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2040            x[i] = easting
2041            y[i] = northing
2042            i += 1
2043
2044
2045    #Construct 2 triangles per 'rectangular' element
2046    volumes = []
2047    for l in range(number_of_longitudes-1):    #X direction
2048        for k in range(number_of_latitudes-1): #Y direction
2049            v1 = vertices[l,k+1]
2050            v2 = vertices[l,k]
2051            v3 = vertices[l+1,k+1]
2052            v4 = vertices[l+1,k]
2053
2054            volumes.append([v1,v2,v3]) #Upper element
2055            volumes.append([v4,v3,v2]) #Lower element
2056
2057    volumes = array(volumes)
2058
2059    if origin == None:
2060        zone = refzone
2061        xllcorner = min(x)
2062        yllcorner = min(y)
2063    else:
2064        zone = origin[0]
2065        xllcorner = origin[1]
2066        yllcorner = origin[2]
2067
2068
2069    outfile.xllcorner = xllcorner
2070    outfile.yllcorner = yllcorner
2071    outfile.zone = zone
2072
2073
2074    if elevation is not None:
2075        z = elevation
2076    else:
2077        if inverted_bathymetry:
2078            z = -1*elevations
2079        else:
2080            z = elevations
2081        #FIXME: z should be obtained from MOST and passed in here
2082
2083    from Numeric import resize
2084    z = resize(z,outfile.variables['z'][:].shape)
2085    outfile.variables['x'][:] = x - xllcorner
2086    outfile.variables['y'][:] = y - yllcorner
2087    outfile.variables['z'][:] = z
2088    outfile.variables['elevation'][:] = z  #FIXME HACK
2089    outfile.variables['time'][:] = times   #Store time relative
2090    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2091
2092
2093
2094    #Time stepping
2095    stage = outfile.variables['stage']
2096    xmomentum = outfile.variables['xmomentum']
2097    ymomentum = outfile.variables['ymomentum']
2098
2099    if verbose: print 'Converting quantities'
2100    n = len(times)
2101    for j in range(n):
2102        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2103        i = 0
2104        for k in range(number_of_latitudes):      #Y direction
2105            for l in range(number_of_longitudes): #X direction
2106                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2107                stage[j,i] = w
2108                h = w - z[i]
2109                xmomentum[j,i] = uspeed[j,k,l]/100*h
2110                ymomentum[j,i] = vspeed[j,k,l]/100*h
2111                i += 1
2112
2113
2114    if verbose:
2115        x = outfile.variables['x'][:]
2116        y = outfile.variables['y'][:]
2117        print '------------------------------------------------'
2118        print 'Statistics of output file:'
2119        print '  Name: %s' %swwname
2120        print '  Reference:'
2121        print '    Lower left corner: [%f, %f]'\
2122              %(xllcorner, yllcorner)
2123        print '    Start time: %f' %starttime
2124        print '  Extent:'
2125        print '    x [m] in [%f, %f], len(x) == %d'\
2126              %(min(x.flat), max(x.flat), len(x.flat))
2127        print '    y [m] in [%f, %f], len(y) == %d'\
2128              %(min(y.flat), max(y.flat), len(y.flat))
2129        print '    t [s] in [%f, %f], len(t) == %d'\
2130              %(min(times), max(times), len(times))
2131        print '  Quantities [SI units]:'
2132        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2133            q = outfile.variables[name][:].flat
2134            print '    %s in [%f, %f]' %(name, min(q), max(q))
2135
2136
2137
2138
2139    outfile.close()
2140
2141
2142
2143
def timefile2netcdf(filename, quantity_names = None):
    """Convert a text file with time series data to a NetCDF tms file.

    The file format is assumed to be two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname: '.txt' is read, '.tms' written.

    quantity_names optionally supplies the NetCDF variable name for each
    column; columns without a name become 'Attribute0', 'Attribute1', ...

    Times must be strictly increasing.  Stored times are relative to the
    first one, which becomes the starttime attribute (seconds since the
    epoch, midnight 1/1/1970).
    """

    import time, calendar
    from Numeric import array, zeros, Float, alltrue
    from config import time_format
    from util import ensure_numeric


    #Read first line to establish start time and number of attributes
    fid = open(filename + '.txt')
    line = fid.readline()
    fid.close()

    fields = line.split(',')
    #Interpolate filename into the message (the original left the %s
    #placeholder unfilled)
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg

    try:
        starttime = calendar.timegm(time.strptime(fields[0], time_format))
    except ValueError:
        msg = 'First field in file %s must be' %filename
        msg += ' date-time with format %s.\n' %time_format
        msg += 'I got %s instead.' %fields[0]
        raise msg


    #Split values
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg


    #Read times proper
    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    N = len(lines)
    d = len(q)

    T = zeros(N, Float)       #Time
    Q = zeros((N, d), Float)  #Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        realtime = calendar.timegm(time.strptime(fields[0], time_format))

        #Store times relative to the start time
        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonically ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg


    #Create NetCDF file
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(filename + '.tms', 'w')


    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'


    #Reference point
    #Start time in seconds since the epoch (midnight 1/1/1970)
    #FIXME: Use Georef
    fid.starttime = starttime


    # dimension definitions
    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', Float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError, KeyError):
            #quantity_names is None, too short, or lacks the key -
            #fall back to a generated attribute name
            name = 'Attribute%d'%i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()
2264
2265
def extent_sww(file_name):
    """
    Read in an sww file and return its extent.

    Input;
    file_name - the sww file

    Output;
    A list [min(x), max(x), min(y), max(y), min(stage), max(stage)]
    giving the spatial extent of the mesh and the range of the stage
    values over all timesteps and vertices.
    """


    from Scientific.IO.NetCDF import NetCDFFile

    #Open NetCDF file for reading
    fid = NetCDFFile(file_name, 'r')

    #Pull out coordinates and stage
    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stages = fid.variables['stage'][:]

    fid.close()

    #Stage is 2D (time x points) so flatten before taking extrema
    extent = [min(xs), max(xs),
              min(ys), max(ys),
              min(stages.flat), max(stages.flat)]
    return extent
2300
2301
2302def sww2domain(filename,boundary=None,t=None,\
2303               fail_if_NaN=True,NaN_filler=0\
2304               ,verbose = True,very_verbose = False):
2305    """
2306    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
2307
2308    Boundary is not recommended if domian.smooth is not selected, as it
2309    uses unique coordinates, but not unique boundaries. This means that
2310    the boundary file will not be compatable with the coordiantes, and will
2311    give a different final boundary, or crash.
2312    """
2313    NaN=9.969209968386869e+036
2314    #initialise NaN.
2315
2316    from Scientific.IO.NetCDF import NetCDFFile
2317    from shallow_water import Domain
2318    from Numeric import asarray, transpose, resize
2319
2320    if verbose: print 'Reading from ', filename
2321    fid = NetCDFFile(filename, 'r')    #Open existing file for read
2322    time = fid.variables['time']       #Timesteps
2323    if t is None:
2324        t = time[-1]
2325    time_interp = get_time_interp(time,t)
2326
2327    # Get the variables as Numeric arrays
2328    x = fid.variables['x'][:]             #x-coordinates of vertices
2329    y = fid.variables['y'][:]             #y-coordinates of vertices
2330    elevation = fid.variables['elevation']     #Elevation
2331    stage = fid.variables['stage']     #Water level
2332    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
2333    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
2334
2335    starttime = fid.starttime[0]
2336    volumes = fid.variables['volumes'][:] #Connectivity
2337    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
2338
2339    conserved_quantities = []
2340    interpolated_quantities = {}
2341    other_quantities = []
2342
2343    # get geo_reference
2344    #sww files don't have to have a geo_ref
2345    try:
2346        geo_reference = Geo_reference(NetCDFObject=fid)
2347    except: #AttributeError, e:
2348        geo_reference = None
2349
2350    if verbose: print '    getting quantities'
2351    for quantity in fid.variables.keys():
2352        dimensions = fid.variables[quantity].dimensions
2353        if 'number_of_timesteps' in dimensions:
2354            conserved_quantities.append(quantity)
2355            interpolated_quantities[quantity]=\
2356                  interpolated_quantity(fid.variables[quantity][:],time_interp)
2357        else: other_quantities.append(quantity)
2358
2359    other_quantities.remove('x')
2360    other_quantities.remove('y')
2361    other_quantities.remove('z')
2362    other_quantities.remove('volumes')
2363
2364    conserved_quantities.remove('time')
2365
2366    if verbose: print '    building domain'
2367#    From domain.Domain:
2368#    domain = Domain(coordinates, volumes,\
2369#                    conserved_quantities = conserved_quantities,\
2370#                    other_quantities = other_quantities,zone=zone,\
2371#                    xllcorner=xllcorner, yllcorner=yllcorner)
2372
2373#   From shallow_water.Domain:
2374    coordinates=coordinates.tolist()
2375    volumes=volumes.tolist()
2376    #FIXME:should this be in mesh?(peter row)
2377    if fid.smoothing == 'Yes': unique = False
2378    else: unique = True
2379    if unique:
2380        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
2381
2382
2383    domain = Domain(coordinates, volumes, boundary)
2384
2385    if not boundary is None:
2386        domain.boundary = boundary
2387
2388    domain.geo_reference = geo_reference
2389
2390    domain.starttime=float(starttime)+float(t)
2391    domain.time=0.0
2392
2393    for quantity in other_quantities:
2394        try:
2395            NaN = fid.variables[quantity].missing_value
2396        except:
2397            pass #quantity has no missing_value number
2398        X = fid.variables[quantity][:]
2399        if very_verbose:
2400            print '       ',quantity
2401            print '        NaN =',NaN
2402            print '        max(X)'
2403            print '       ',max(X)
2404            print '        max(X)==NaN'
2405            print '       ',max(X)==NaN
2406            print ''
2407        if (max(X)==NaN) or (min(X)==NaN):
2408            if fail_if_NaN:
2409                msg = 'quantity "%s" contains no_data entry'%quantity
2410                raise msg
2411            else:
2412                data = (X<>NaN)
2413                X = (X*data)+(data==0)*NaN_filler
2414        if unique:
2415            X = resize(X,(len(X)/3,3))
2416        domain.set_quantity(quantity,X)
2417#
2418    for quantity in conserved_quantities:
2419        try:
2420            NaN = fid.variables[quantity].missing_value
2421        except:
2422            pass #quantity has no missing_value number
2423        X = interpolated_quantities[quantity]
2424        if very_verbose:
2425            print '       ',quantity
2426            print '        NaN =',NaN
2427            print '        max(X)'
2428            print '       ',max(X)
2429            print '        max(X)==NaN'
2430            print '       ',max(X)==NaN
2431            print ''
2432        if (max(X)==NaN) or (min(X)==NaN):
2433            if fail_if_NaN:
2434                msg = 'quantity "%s" contains no_data entry'%quantity
2435                raise msg
2436            else:
2437                data = (X<>NaN)
2438                X = (X*data)+(data==0)*NaN_filler
2439        if unique:
2440            X = resize(X,(X.shape[0]/3,3))
2441        domain.set_quantity(quantity,X)
2442    fid.close()
2443    return domain
2444
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a stored quantity in time.

    time_interp is an (index, ratio) pair as produced by get_time_interp.
    When ratio is 0 the stored value at index is returned unchanged;
    otherwise the linear blend between index and index+1 is returned.
    """
    index, ratio = time_interp
    Q = saved_quantity

    if not ratio > 0:
        #Exact timestep - no blending required
        return Q[index]

    #Vector of values linearly blended between the bracketing timesteps
    return (1-ratio)*Q[index] + ratio*Q[index+1]
2456
def get_time_interp(time, t=None):
    """Find the (index, ratio) pair for linear time interpolation.

    With t omitted the final timestep is selected exactly and (-1, 0.)
    is returned.  Otherwise t must lie within [time[0], time[-1]]; the
    returned index satisfies time[index] <= t and ratio gives t's
    fractional position between time[index] and time[index+1].
    Borrowed from previous pyvolution code.
    """
    if t is None:
        #Default to the last stored timestep, selected exactly
        return (-1, 0.)

    msg = 'Time interval derived from file %s [%s:%s]'\
        %('FIXMEfilename', time[0], time[-1])
    msg += ' does not match model time: %s' %t
    if t < time[0]: raise msg
    if t > time[-1]: raise msg

    #Scan forwards past t, then step back to the bracketing index
    index = 0
    while t > time[index]: index += 1
    while t < time[index]: index -= 1

    if t == time[index]:
        #Exact hit - also protects against t == time[-1] (last time)
        ratio = 0
    else:
        #t now lies strictly between index and index+1
        ratio = (t - time[index])/(time[index+1] - time[index])

    return (index, ratio)
2483
2484
def weed(coordinates,volumes,boundary = None):
    """Merge duplicated points in a mesh description.

    coordinates - list (or Numeric array) of [x, y] points, possibly
                  containing repeats
    volumes     - list (or Numeric array) of index triplets into
                  coordinates; negative indices are left untouched
    boundary    - optional dictionary mapping segments (tuples of two
                  point indices) to tag strings

    Returns (coordinates, volumes, boundary) where every distinct point
    occurs exactly once and all indices are remapped accordingly.  The
    order of the returned points follows dictionary key order.
    """
    #NOTE: the original guard compared type(x) to the string 'array',
    #which is never true, so Numeric arrays were never converted.
    #Duck-type on .tolist() to perform the intended conversion.
    if hasattr(coordinates, 'tolist'):
        coordinates = coordinates.tolist()
    if hasattr(volumes, 'tolist'):
        volumes = volumes.tolist()

    #Map each original index to its point, remembering the first index
    #at which every distinct point was seen
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point not in point_dict:
            point_dict[point] = i
        same_point[i] = point
        #to change all point i references to point j

    #Rebuild the coordinate list with each point once and renumber
    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    #Remap volume indices onto the weeded coordinate list
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    new_boundary = {}
    if not boundary is None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)

            if (point0,point1) in new_boundary:
                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
                                              #+','+label

            elif (point1,point0) in new_boundary:
                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
                                              #+','+label
            else: new_boundary[(point0,point1)]=label

        boundary = new_boundary

    return coordinates,volumes,boundary
2540
2541
2542
2543
2544
2545
2546
2547
2548
2549
2550###############################################
2551#OBSOLETE STUFF
2552#Native checkpoint format.
2553#Information needed to recreate a state is preserved
2554#FIXME: Rethink and maybe use netcdf format
def cpt_variable_writer(filename, t, v0, v1, v2):
    """Store all conserved quantities to file

    Writes one line per triangle: the N vertex values of v0, then v1,
    then v2, each as '%.16e '.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        #Emit the three quantities for this triangle in order
        for quantity in (v0, v1, v2):
            for j in range(N):
                fid.write('%.16e ' %quantity[i,j])

        fid.write('\n')
    fid.close()
2575
2576
def cpt_variable_reader(filename, t, v0, v1, v2):
    """Read all conserved quantities back from a cpt file.

    v0, v1, v2 are MxN arrays filled in place from the file written by
    cpt_variable_writer (each line holds N values of v0, then N of v1,
    then N of v2).
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Reading from %s' %FN

    fid = open(FN)


    for i in range(M):
        values = fid.readline().split() #Get one line

        for j in range(N):
            #The original hard-coded offsets 3+j and 6+j, which are only
            #correct when N == 3; use N-based offsets to match the writer
            #for any N (identical behaviour for N == 3).
            v0[i,j] = float(values[j])
            v1[i,j] = float(values[N+j])
            v2[i,j] = float(values[2*N+j])

    fid.close()
2598
def cpt_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z,z,z coordinates of triangles constituting the bed
    elevation.
    FIXME: Not in use pt
    """

    M, N = v0.shape


    #NOTE(review): debug leftover - prints X0 and terminates the
    #interpreter via sys.exit(); everything below is unreachable.
    #Left as-is since the function is marked not in use.
    print X0
    import sys; sys.exit()
    FN = create_filename(filename, 'cpt', M)
    print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        for j in range(2):
            fid.write('%.16e ' %X0[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v0[i,j])       #z,z,z,

        for j in range(2):
            fid.write('%.16e ' %X1[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v1[i,j])

        for j in range(2):
            fid.write('%.16e ' %X2[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v2[i,j])

        fid.write('\n')
    fid.close()
2632
2633
2634
2635#Function for storing out to e.g. visualisation
2636#FIXME: Do we want this?
2637#FIXME: Not done yet for this version
def dat_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z coordinates of triangles constituting the bed elevation.

    One line per triangle: x, y and first value (z) for each of the
    three vertices, all formatted with '%f '.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        #x, y followed by z for each vertex in turn
        for X, v in ((X0, v0), (X1, v1), (X2, v2)):
            fid.write('%f ' %X[i,0])   #x
            fid.write('%f ' %X[i,1])   #y
            fid.write('%f ' %v[i,0])   #z

        fid.write('\n')
    fid.close()
2663
2664
2665
def dat_variable_writer(filename, t, v0, v1, v2):
    """Store water height to file

    Writes one line per triangle holding the first vertex value of
    v0, v1 and v2 at time t, each formatted with '%.4f '.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M, t)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        #Same byte layout as the original: three values, trailing space,
        #then newline
        fid.write('%.4f %.4f %.4f \n' %(v0[i,0], v1[i,0], v2[i,0]))
    fid.close()
2683
2684
def read_sww(filename):
    """Read sww Net CDF file containing Shallow Water Wave simulation

    The integer array volumes is of shape Nx3 where N is the number of
    triangles in the mesh.

    Each entry in volumes is an index into the x,y arrays (the location).

    Quantities stage, elevation, xmomentum and ymomentum are all in arrays of dimensions
    number_of_timesteps, number_of_points.

    The momentum is not always stored.

    NOTE(review): this function currently returns None - the variables
    are read into locals and then discarded, and the NetCDF file is
    never closed.  It appears to be a stub or obsolete.
    """
    from Scientific.IO.NetCDF import NetCDFFile
    print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
#latitude, longitude
    # Get the variables as Numeric arrays
    x = fid.variables['x']             #x-coordinates of vertices
    y = fid.variables['y']             #y-coordinates of vertices
    z = fid.variables['elevation']     #Elevation
    time = fid.variables['time']       #Timesteps
    stage = fid.variables['stage']     #Water level
    #xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    #ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    volumes = fid.variables['volumes'] #Connectivity
2713
2714
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.

    basename_in  - root name of input .dem file
    stencil      - 2D array of weights; each output cell is the weighted
                   sum of the stencil applied to the input grid
                   (presumably weights sum to 1 - TODO confirm)
    cellsize_new - output cell size; assumed an integer multiple of the
                   input cellsize (ratio is truncated via int())
    basename_out - root name of output; defaults to
                   basename_in + '_' + repr(cellsize_new)
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata (scalar attributes are stored as length-1 arrays,
    #hence the [0] indexing)
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out == None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    #(Python 2 integer division throughout: offsets centre the stencil
    #and the new grid is shrunk so every stencil placement fits inside
    #the original grid)
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                           'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    #Shift the origin so output cells align with the stencil centres
    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    #View the flat elevation data as a 2D grid
    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev =  zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            #Window of the input grid covered by the stencil at (i, j)
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        #Write one decimated row at a time
        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
2841
2842
2843   
def sww2asc_obsolete(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            verbose = False,
            origin = None):
    """Read SWW file and convert to Digitial Elevation model format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    if quantity is given, out values from quantity otherwise default to
    elevation

    if timestep (an index) is given, output quantity at that timestep

    if reduction is given use that to reduce quantity over all timesteps.

    basename_in: stem of the .sww file to read (without extension)
    basename_out: stem of the output .asc/.prj files; defaults to
        basename_in + '_' + quantity
    quantity: name of an sww variable, or 'depth' (computed as
        stage - elevation); defaults to 'elevation'
    timestep: NOTE(review): accepted but currently unused - the whole
        time history is always read and collapsed with `reduction`
    reduction: callable used to collapse the time axis per point
        (default: builtin max)
    cellsize: grid spacing (in the sww's coordinate units) of the
        output raster
    origin: optional (zone, xllcorner, yllcorner) tuple overriding the
        georeference stored in the sww file
    """
    from Numeric import array, Float, concatenate, NewAxis, zeros,\
         sometrue
    from utilities.polygon import inside_polygon

    #FIXME: Should be variable
    # Hard-coded southern-hemisphere UTM projection parameters for the
    # accompanying .prj file
    datum = 'WGS84'
    false_easting = 500000
    false_northing = 10000000

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    ascfile = basename_out + '.asc'
    prjfile = basename_out + '.prj'


    if verbose: print 'Reading from %s' %swwfile
    #Read sww file
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    # x, y are vertex coordinates relative to the sww's georeference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    ymin = min(y); ymax = max(y)
    xmin = min(x); xmax = max(x)

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        # Caller-supplied georeference overrides whatever is in the file
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]


    #Get quantity and reduce if applicable
    if verbose: print 'Reading quantity %s' %quantity

    if quantity.lower() == 'depth':
        # Depth is derived: water surface minus bed elevation
        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
    else:
        q = fid.variables[quantity][:]


    if len(q.shape) == 2:
        # Time-varying quantity: collapse the time axis point by point
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    #Now q has dimension: number_of_points

    #Create grid and update xll/yll corner and x,y
    if verbose: print 'Creating grid'
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1

    # Shift the origin to the mesh's lower-left corner and make the
    # vertex coordinates relative to it
    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2


    from Numeric import zeros, Float
    # One (x, y) pair per raster cell, row-major with row 0 at the
    # bottom (southern) edge
    grid_points = zeros ( (ncols*nrows, 2), Float )


    for i in xrange(nrows):
        yg = i*cellsize
        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

            grid_points[k,0] = xg
            grid_points[k,1] = yg

    #Interpolate
    from least_squares import Interpolation


    #FIXME: This should be done with precrop = True, otherwise it'll
    #take forever. With expand_search  set to False, some grid points might
    #miss out....

    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
                           precrop = False, expand_search = True,
                           verbose = verbose)

    #Interpolate using quantity values
    if verbose: print 'Interpolating'
    grid_values = interp.interpolate(q).flat

    #Write
    #Write prj file
    if verbose: print 'Writing %s' %prjfile
    prjid = open(prjfile, 'w')
    prjid.write('Projection    %s\n' %'UTM')
    prjid.write('Zone          %d\n' %zone)
    prjid.write('Datum         %s\n' %datum)
    prjid.write('Zunits        NO\n')
    prjid.write('Units         METERS\n')
    prjid.write('Spheroid      %s\n' %datum)
    prjid.write('Xshift        %d\n' %false_easting)
    prjid.write('Yshift        %d\n' %false_northing)
    prjid.write('Parameters\n')
    prjid.close()



    if verbose: print 'Writing %s' %ascfile
    NODATA_value = -9999

    ascid = open(ascfile, 'w')

    ascid.write('ncols         %d\n' %ncols)
    ascid.write('nrows         %d\n' %nrows)
    ascid.write('xllcorner     %d\n' %newxllcorner)
    ascid.write('yllcorner     %d\n' %newyllcorner)
    ascid.write('cellsize      %f\n' %cellsize)
    ascid.write('NODATA_value  %d\n' %NODATA_value)


    #Get bounding polygon from mesh
    # Grid points outside the mesh get NODATA_value in the output
    P = interp.mesh.get_boundary_polygon()
    inside_indices = inside_polygon(grid_points, P)

    for i in range(nrows):
        if verbose and i%((nrows+10)/10)==0:
            print 'Doing row %d of %d' %(i, nrows)

        for j in range(ncols):
            # The asc format stores the top (northern) row first, so
            # flip the row order relative to grid_points
            index = (nrows-i-1)*ncols+j

            # NOTE(review): linear scan of inside_indices per cell -
            # O(ncols*nrows*len(inside_indices)) overall
            if sometrue(inside_indices == index):
                ascid.write('%f ' %grid_values[index])
            else:
                ascid.write('%d ' %NODATA_value)

        ascid.write('\n')

    #Close
    ascid.close()
    fid.close()
3058
def asc_csiro2sww(bath_dir, elevation_dir, sww_file, verbose=False):
    """Convert a set of CSIRO esri asc grids to a single sww file.

    assume:
    All files are in ersi asc format

    4 types of information
    bathymetry
    elevation
    u velocity
    v velocity

    Assume each type is in a seperate directory
    assume one bath file with extention .000

    bath_dir: directory containing the single bathymetry grid; its
        geometry (ncols, nrows, corners, cellsize) defines the mesh.
    elevation_dir: directory with one elevation grid per timestep,
        named 'el' + <date/time stamp> where the stamp is taken from
        the last 12 characters of the bathymetry filename.
    sww_file: name of the NetCDF sww file to create.

    NOTE(review): work in progress - the 'time', 'stage', 'xmomentum'
    and 'ymomentum' variables are created but never populated (see the
    commented-out assignments near the end).
    """
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, Int, Int32, zeros, array

    from coordinate_transforms.redfearn import redfearn

    precision = Float # So if we want to change the precision its done here

    # go in to the bath dir and load the 000 file,
    bath_files = os.listdir(bath_dir)

    #fixme: make more general?
    bath_file = bath_files[0]
    bath_dir_file = bath_dir + os.sep + bath_file
    bath_metadata, bath_grid = _read_asc(bath_dir_file)

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    base_start = bath_file[-12:]

    #go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir + os.sep + 'el' + base_start
    elevation_metadata, elevation_grid = _read_asc(elevation_dir_file)

    # All grids must share the same geometry (corners, cellsize, NODATA)
    assert bath_metadata == elevation_metadata

    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes*number_of_longitudes
    # Two triangles per rectangular grid cell
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    # Coordinates of the grid columns/rows (presumably geographic
    # degrees, given the redfearn conversion below - confirm with data)
    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] for x in range(number_of_longitudes)]

    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] for y in range(number_of_latitudes)]

    # reverse order of lat, so the fist lat represents the first grid row
    latitudes.reverse()

    ######### WRITE THE SWW FILE #############
    # NetCDF file definition
    outfile = NetCDFFile(sww_file, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'


    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1


    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)

    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    #Store
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print('Making triangular grid')
    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    # Map (column, row) -> flat point index; points are laid out
    # row-major with the first (northern-most) latitude row first
    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            # Convert geographic coordinates to UTM easting/northing
            zone, easting, northing = redfearn(lat, lon)

            # Fixed: msg used to be accidentally built as a tuple
            msg = 'Zone boundary crossed at longitude = %s' % lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1


    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            volumes.append([v1,v2,v3]) #Upper element
            volumes.append([v4,v3,v2]) #Lower element

    volumes = array(volumes)

    # FIXME - use coords
    zone = refzone
    xllcorner = min(x)
    yllcorner = min(y)

    outfile.xllcorner = xllcorner
    outfile.yllcorner = yllcorner
    outfile.zone = zone


    # Store coordinates relative to the new origin; bathymetry doubles
    # as the (backwards-compatible) 'z' variable
    z = resize(bath_grid,outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - xllcorner
    outfile.variables['y'][:] = y - yllcorner
    outfile.variables['z'][:] = z
    #outfile.variables['elevation'][:] = z  #FIXME HACK
    #outfile.variables['time'][:] = times   #Store time relative
    #outfile.variables['time'][:] = [0]   #Store time relative
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

    # do this to create an ok sww file?
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z
    #outfile.variables['xmomentum'] = z
    #outfile.variables['ymomentum'] = z

    outfile.close()
3243   
3244def _read_asc(filename, verbose=False):
3245    """Read esri file from the following ASCII format (.asc)
3246
3247    Example:
3248
3249    ncols         3121
3250    nrows         1800
3251    xllcorner     722000
3252    yllcorner     5893000
3253    cellsize      25
3254    NODATA_value  -9999
3255    138.3698 137.4194 136.5062 135.5558 ..........
3256
3257    """
3258
3259    datafile = open(filename)
3260
3261    if verbose: print 'Reading DEM from %s' %(filename)
3262    lines = datafile.readlines()
3263    datafile.close()
3264
3265    if verbose: print 'Got', len(lines), ' lines'
3266
3267    ncols = int(lines.pop(0).split()[1].strip())
3268    nrows = int(lines.pop(0).split()[1].strip())
3269    xllcorner = float(lines.pop(0).split()[1].strip())
3270    yllcorner = float(lines.pop(0).split()[1].strip())
3271    cellsize = float(lines.pop(0).split()[1].strip())
3272    NODATA_value = float(lines.pop(0).split()[1].strip())
3273   
3274    assert len(lines) == nrows
3275   
3276    #Store data
3277    grid = []
3278   
3279    n = len(lines)
3280    for i, line in enumerate(lines):
3281        cells = line.split()
3282        assert len(cells) == ncols
3283        grid.append(array([float(x) for x in cells]))
3284    grid = array(grid)
3285
3286    return {'xllcorner':xllcorner,
3287            'yllcorner':yllcorner,
3288            'cellsize':cellsize,
3289            'NODATA_value':NODATA_value}, grid
3290   
Note: See TracBrowser for help on using the repository browser.