source: inundation/pyvolution/data_manager.py @ 2009

Last change on this file since 2009 was 2009, checked in by ole, 18 years ago

Added conversion from HEC-RAS to NetCDF pts + test

File size: 100.6 KB
Line 
1"""datamanager.py - input output for AnuGA
2
3
This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
NC -> SWW: Conversion of MOST boundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
TSH + Boundary SWW -> SWW: Simulation using pyvolution
48
49"""
50import os
51
52from Numeric import concatenate, array, Float, resize
53
54from coordinate_transforms.geo_reference import Geo_reference
55
def make_filename(s):
    """Transform argument string into a suitable filename

    Strips surrounding whitespace, turns spaces into underscores,
    drops parentheses and finally collapses double underscores once.
    """

    result = s.strip().replace(' ', '_')
    for bracket in ('(', ')'):
        result = result.replace(bracket, '')

    return result.replace('__', '_')
67
68
69def check_dir(path, verbose=None):
70    """Check that specified path exists.
71    If path does not exist it will be created if possible
72
73    USAGE:
74       checkdir(path, verbose):
75
76    ARGUMENTS:
77        path -- Directory
78        verbose -- Flag verbose output (default: None)
79
80    RETURN VALUE:
81        Verified path including trailing separator
82
83    """
84
85    import os, sys
86    import os.path
87
88    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
89        unix = 0
90    else:
91        unix = 1
92
93
94    if path[-1] != os.sep:
95        path = path + os.sep  # Add separator for directories
96
97    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
98    if not (os.access(path,os.R_OK and os.W_OK) or path == ''):
99        try:
100            exitcode=os.mkdir(path)
101
102            # Change access rights if possible
103            #
104            if unix:
105                exitcode=os.system('chmod 775 '+path)
106            else:
107                pass  # FIXME: What about acces rights under Windows?
108
109            if verbose: print 'MESSAGE: Directory', path, 'created.'
110
111        except:
112            print 'WARNING: Directory', path, 'could not be created.'
113            if unix:
114                path = '/tmp/'
115            else:
116                path = 'C:'
117
118            print 'Using directory %s instead' %path
119
120    return(path)
121
122
123
124def del_dir(path):
125    """Recursively delete directory path and all its contents
126    """
127
128    import os
129
130    if os.path.isdir(path):
131        for file in os.listdir(path):
132            X = os.path.join(path, file)
133
134
135            if os.path.isdir(X) and not os.path.islink(X):
136                del_dir(X)
137            else:
138                try:
139                    os.remove(X)
140                except:
141                    print "Could not remove file %s" %X
142
143        os.rmdir(path)
144
145
146
def create_filename(datadir, filename, format, size=None, time=None):
    """Build a full filename of the form
    <datadir>/<filename>[_size<size>][_time<time>].<format>

    datadir is verified (and created if needed) via check_dir.
    """

    import os
    #from config import data_dir

    parts = [check_dir(datadir) + filename]

    if size is not None:
        parts.append('_size%d' %size)

    if time is not None:
        parts.append('_time%.2f' %time)

    parts.append('.' + format)
    return ''.join(parts)
162
163
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)

    #NOTE: check_dir already returns the path with a trailing separator,
    #and files written via create_filename are named <filename>_size<size>...
    #The previous pattern ('_size=%d' plus an extra os.sep) did not match
    #that convention.
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)
177
178
179
180#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats

    Holds the output filename (derived from the domain), the current
    timestep counter and the number of volumes in the mesh.
    """


    def __init__(self, domain, extension, mode = 'w'):
        #Accept only the three supported file modes
        msg = '''Mode %s must be either:''' %mode +\
              '''   'w' (write)'''+\
              '''   'r' (read)''' +\
              '''   'a' (append)'''
        assert mode in ['r', 'w', 'a'], msg

        #Create filename from data directory, domain name and extension
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))
        self.filename = create_filename(domain.get_datadir(),
                        domain.get_name(), extension)

        self.domain = domain
        self.number_of_volumes = len(domain)
        self.timestep = 0


        #FIXME: Should we have a general set_precision function?
207
208
209
#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        """Create (mode 'w') or open an sww file for the given domain.

        max_size is the file size (bytes) at which output rolls over to a
        new file (see store_timestep); recursion tracks how many times
        such a rollover has already happened (False/0 for the first file).
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #With smoothing, vertices shared between triangles are stored
            #once; otherwise every triangle stores its own three vertices
            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            #Older readers expect elevation duplicated under the name 'z'
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        #Duplicate elevation under the legacy name 'z' as well
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file

        names may be a single quantity name or a list/tuple of names.
        If appending another timestep would push the file past
        self.max_size, output rolls over into a fresh sww file built
        recursively below.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        minimum_allowed_depth = 0.001
        #minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        #Retry the open for up to ~10s: another process may be reading
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            #NOTE(review): raising a string is a deprecated Python 2 idiom
            msg = 'File %s could not be opened for append' %self.filename
            raise msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1                     #Timestep count after this store
        file_size = stat(self.filename)[6]  #Current size in bytes
        file_size_increase =  file_size/i   #Estimated size of one timestep
        #NOTE: 2**self.recursion is 1 for the first file (recursion=False)
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domian.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break swollens reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            #Allow a single name as well as a sequence of names
            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #Clip stage to the bed wherever depth is below the
                    #minimum allowed depth
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()
480
481
482
483#Class for handling checkpoints data
484class Data_format_cpt(Data_format):
485    """Interface to native NetCDF format (.cpt)
486    """
487
488
489    def __init__(self, domain, mode = 'w'):
490        from Scientific.IO.NetCDF import NetCDFFile
491        from Numeric import Int, Float, Float
492
493        self.precision = Float #Use full precision
494
495        Data_format.__init__(self, domain, 'sww', mode)
496
497
498        # NetCDF file definition
499        fid = NetCDFFile(self.filename, mode)
500
501        if mode == 'w':
502            #Create new file
503            fid.institution = 'Geoscience Australia'
504            fid.description = 'Checkpoint data'
505            #fid.smooth = domain.smooth
506            fid.order = domain.default_order
507
508            # dimension definitions
509            fid.createDimension('number_of_volumes', self.number_of_volumes)
510            fid.createDimension('number_of_vertices', 3)
511
512            #Store info at all vertices (no smoothing)
513            fid.createDimension('number_of_points', 3*self.number_of_volumes)
514            fid.createDimension('number_of_timesteps', None) #extensible
515
516            # variable definitions
517
518            #Mesh
519            fid.createVariable('x', self.precision, ('number_of_points',))
520            fid.createVariable('y', self.precision, ('number_of_points',))
521
522
523            fid.createVariable('volumes', Int, ('number_of_volumes',
524                                                'number_of_vertices'))
525
526            fid.createVariable('time', self.precision,
527                               ('number_of_timesteps',))
528
529            #Allocate space for all quantities
530            for name in domain.quantities.keys():
531                fid.createVariable(name, self.precision,
532                                   ('number_of_timesteps',
533                                    'number_of_points'))
534
535        #Close
536        fid.close()
537
538
539    def store_checkpoint(self):
540        """
541        Write x,y coordinates of triangles.
542        Write connectivity (
543        constituting
544        the bed elevation.
545        """
546
547        from Scientific.IO.NetCDF import NetCDFFile
548
549        from Numeric import concatenate
550
551        domain = self.domain
552
553        #Get NetCDF
554        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
555
556        # Get the variables
557        x = fid.variables['x']
558        y = fid.variables['y']
559
560        volumes = fid.variables['volumes']
561
562        # Get X, Y and bed elevation Z
563        Q = domain.quantities['elevation']
564        X,Y,Z,V = Q.get_vertex_values(xy=True,
565                                      precision = self.precision)
566
567
568
569        x[:] = X.astype(self.precision)
570        y[:] = Y.astype(self.precision)
571        z[:] = Z.astype(self.precision)
572
573        volumes[:] = V
574
575        #Close
576        fid.close()
577
578
579    def store_timestep(self, name):
580        """Store time and named quantity to file
581        """
582        from Scientific.IO.NetCDF import NetCDFFile
583        from time import sleep
584
585        #Get NetCDF
586        retries = 0
587        file_open = False
588        while not file_open and retries < 10:
589            try:
590                fid = NetCDFFile(self.filename, 'a') #Open existing file
591            except IOError:
592                #This could happen if someone was reading the file.
593                #In that case, wait a while and try again
594                msg = 'Warning (store_timestep): File %s could not be opened'\
595                          %self.filename
596                msg += ' - trying again'
597                print msg
598                retries += 1
599                sleep(1)
600            else:
601                file_open = True
602
603            if not file_open:
604                msg = 'File %s could not be opened for append' %self.filename
605            raise msg
606
607
608        domain = self.domain
609
610        # Get the variables
611        time = fid.variables['time']
612        stage = fid.variables['stage']
613        i = len(time)
614
615        #Store stage
616        time[i] = self.domain.time
617
618        # Get quantity
619        Q = domain.quantities[name]
620        A,V = Q.get_vertex_values(xy=False,
621                                  precision = self.precision)
622
623        stage[i,:] = A.astype(self.precision)
624
625        #Flush and close
626        fid.sync()
627        fid.close()
628
629
630
631
632
#Function for storing xya output
#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface to the ASCII xya format: one line per vertex with
    x, y and three attributes (bed elevation, stage, friction).
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        #Number of points depends on whether vertices are shared (smooth)
        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        #Header: <number of points> <number of attributes>
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        #NOTE: map(None, ...) is the Python 2 padding-zip idiom;
        #all sequences are expected to have equal length here
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file

        Not implemented for the xya format.
        """
        pass
704
705
#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)

       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """

    import os.path

    #Ensure the output file carries the .obj extension
    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    #BUGFIX/cleanup: obj is a plain text format, so open in text mode
    #(the old 'wb' was misleading and rejects str data where str != bytes)
    outfile = open(FN, 'w')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    #Vertex records, one per element corner
    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    #Face records referencing the (1-based) vertex indices written above
    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()
744
745
746#########################################################
747#Conversion routines
748########################################################
749
def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj

    Reads <basefilename>_size<size>.sww and writes one obj file for the
    bathymetry plus one obj file per stored timestep.
    size is the number of volumes (triangles) in the mesh.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    #Unflatten the point arrays: vertex j of triangle i is at index i+j*M
    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        #Use the stage at this timestep as the z coordinate
        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        #NOTE(review): only the first five characters of basefilename are
        #used here - presumably to keep names short for the viewer; verify
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)
802
803
804def dat2obj(basefilename):
805    """Convert line based data output to obj
806    FIXME: Obsolete?
807    """
808
809    import glob, os
810    from config import data_dir
811
812
813    #Get bathymetry and x,y's
814    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()
815
816    from Numeric import zeros, Float
817
818    M = len(lines)  #Number of lines
819    x = zeros((M,3), Float)
820    y = zeros((M,3), Float)
821    z = zeros((M,3), Float)
822
823    ##i = 0
824    for i, line in enumerate(lines):
825        tokens = line.split()
826        values = map(float,tokens)
827
828        for j in range(3):
829            x[i,j] = values[j*3]
830            y[i,j] = values[j*3+1]
831            z[i,j] = values[j*3+2]
832
833        ##i += 1
834
835
836    #Write obj for bathymetry
837    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)
838
839
840    #Now read all the data files with variable information, combine with
841    #x,y info
842    #and store as obj
843
844    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')
845
846    for filename in files:
847        print 'Processing %s' % filename
848
849        lines = open(data_dir+os.sep+filename,'r').readlines()
850        assert len(lines) == M
851        root, ext = os.path.splitext(filename)
852
853        #Get time from filename
854        i0 = filename.find('_time=')
855        if i0 == -1:
856            #Skip bathymetry file
857            continue
858
859        i0 += 6  #Position where time starts
860        i1 = filename.find('.dat')
861
862        if i1 > i0:
863            t = float(filename[i0:i1])
864        else:
865            raise 'Hmmmm'
866
867
868
869        ##i = 0
870        for i, line in enumerate(lines):
871            tokens = line.split()
872            values = map(float,tokens)
873
874            for j in range(3):
875                z[i,j] = values[j]
876
877            ##i += 1
878
879        #Write obj for variable data
880        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
881
882
def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    #Create all variables with the same type and dimensions as the input
    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass  #Time dependent variables are subsampled below
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
930
931
#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename

    E.g. domain.format == 'sww' maps to Data_format_sww.
    """

    #Look the writer class up by name in module globals instead of eval
    #(same lookup, without evaluating arbitrary strings)
    cls = globals()['Data_format_%s' %domain.format]
    return cls(domain, mode)
939
940def xya2pts(basename_in, basename_out=None, verbose=False,
941            #easting_min=None, easting_max=None,
942            #northing_min=None, northing_max=None,
943            stride = 1,           
944            attribute_name = 'elevation',
945            z_func = None):
946    """Read points data from ascii (.xya)
947
948    Example:
949
950              x(m)        y(m)        z(m)
951         0.00000e+00  0.00000e+00  1.3535000e-01
952         0.00000e+00  1.40000e-02  1.3535000e-01
953
954   
955
956    Convert to NetCDF pts format which is
957
958    points:  (Nx2) Float array
959    elevation: N Float array
960
961    Only lines that contain three numeric values are processed
962
963    If z_func is specified, it will be applied to the third field
964    """
965
966    import os 
967    #from Scientific.IO.NetCDF import NetCDFFile
968    from Numeric import Float, arrayrange, concatenate
969
970    root, ext = os.path.splitext(basename_in)
971
972    if ext == '': ext = '.xya'
973
974    #Get NetCDF
975    infile = open(root + ext, 'r')  #Open existing xya file for read
976
977    if verbose: print 'Reading xya points from %s' %(root + ext)
978
979    points = []
980    attribute = []
981    for i, line in enumerate(infile.readlines()):
982
983        if i % stride != 0: continue
984       
985        fields = line.split()
986
987        try:
988            assert len(fields) == 3
989        except:   
990            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line) 
991
992        try:
993            x = float( fields[0] )
994            y = float( fields[1] )
995            z = float( fields[2] )           
996        except:
997            continue
998
999        points.append( [x, y] )
1000
1001        if callable(z_func):
1002            attribute.append(z_func(z))           
1003        else:   
1004            attribute.append(z)
1005
1006
1007    #Get output file
1008    if basename_out == None:
1009        ptsname = root + '.pts'
1010    else:
1011        ptsname = basename_out + '.pts'
1012
1013    if verbose: print 'Store to NetCDF file %s' %ptsname
1014    write_ptsfile(ptsname, points, attribute, attribute_name)
1015
1016
def write_ptsfile(ptsname, points, attribute, attribute_name = None):
    """Store points and one associated attribute in NetCDF pts format.

    ptsname: name of output file (including .pts extension)
    points: sequence of (x, y) coordinate pairs
    attribute: sequence of attribute values, one per point
    attribute_name: name under which the attribute is stored in the
                    NetCDF file. Defaults to 'attribute'.
    """

    from Numeric import Float
    from Scientific.IO.NetCDF import NetCDFFile

    #Resolve name used for the attribute variable
    name = attribute_name
    if name is None:
        name = 'attribute'

    #Create new NetCDF file
    outfile = NetCDFFile(ptsname, 'w')
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'

    #Write default georeferencing information
    from coordinate_transforms.geo_reference import Geo_reference
    Geo_reference().write_NetCDF(outfile)

    #Dimensions
    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    #Variable definitions
    outfile.createVariable('points', Float,
                           ('number_of_points', 'number_of_dimensions'))
    outfile.createVariable(name, Float, ('number_of_points',))

    #Store the data
    outfile.variables['points'][:, :] = points
    outfile.variables[name][:] = attribute

    outfile.close()
1061   
1062
def dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    basename_in: name of input file (without the .dem extension)
    basename_out: name of output file (without the .pts extension).
                  Defaults to basename_in.
    verbose: if True, print progress information
    easting_min, easting_max, northing_min, northing_max:
        optional bounding box; only grid points inside it are written.
        Each defaults to the corresponding edge of the full DEM extent.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    #Grid layout attributes (stored as length-1 arrays in the .dem file)
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    #Georeferencing attributes
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values: full extent of the DEM
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    #NOTE(review): these offsets are computed but never used below
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing: output coordinates are stored relative to the
    #bounding box's lower left corner (easting_min, northing_min)
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows


    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
    outfile.createDimension('number_of_points', nrows_in_bounding_box*ncols_in_bounding_box)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    #FIXME: Could perhaps be faster using array operations (Fixed 27/7/05)
    global_index = 0
    for i in range(nrows):
        #Progress report roughly every 10% of rows
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)       
       
        lower_index = global_index
        tpoints = zeros((ncols_in_bounding_box, 2), Float)
        telev =  zeros(ncols_in_bounding_box, Float)
        local_index = 0

        #Row 0 is the northernmost row of the grid.
        #NOTE(review): y ranges from yllcorner+cellsize (bottom row) to
        #yllcorner+nrows*cellsize (top row); yllcorner itself is never hit.
        #Looks like an off-by-one vs cell-corner semantics - confirm.
        y = (nrows-i)*cellsize + yllcorner
        for j in range(ncols):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                #Store coordinates relative to the new lower left corner
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        #NOTE(review): rows where fewer than ncols_in_bounding_box points
        #fell inside the box are silently not written, although
        #global_index was still advanced; the final assert then fails.
        if upper_index == lower_index + ncols_in_bounding_box:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nrows_in_bounding_box*ncols_in_bounding_box, 'index not equal to number of points'

    infile.close()
    outfile.close()
1206
1207
1208
1209def _read_hecras_cross_sections(lines):
1210    """Return block of surface lines for each cross section
1211    Starts with SURFACE LINE,
1212    Ends with END CROSS-SECTION 
1213    """
1214
1215    points = []
1216
1217    reading_surface = False
1218    for i, line in enumerate(lines): 
1219       
1220        if len(line.strip()) == 0:    #Ignore blanks
1221            continue                       
1222
1223        if lines[i].strip().startswith('SURFACE LINE'):
1224            reading_surface = True
1225            continue
1226
1227        if lines[i].strip().startswith('END') and reading_surface:
1228            yield points
1229            reading_surface = False
1230            points = []
1231
1232        if reading_surface:
1233            fields = line.strip().split(',')
1234            easting = float(fields[0])
1235            northing = float(fields[1])
1236            elevation = float(fields[2])                       
1237            points.append([easting, northing, elevation])               
1238
1239
1240
1241
1242def hecras_cross_sections2pts(basename_in,
1243                              basename_out=None,
1244                              verbose=False):
1245    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1246   
1247    Example:
1248   
1249
1250# RAS export file created on Mon 15Aug2005 11:42
1251# by HEC-RAS Version 3.1.1
1252
1253BEGIN HEADER:
1254  UNITS: METRIC
1255  DTM TYPE: TIN
1256  DTM: v:\1\cit\perth_topo\river_tin
1257  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1258  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1259  MAP PROJECTION: UTM
1260  PROJECTION ZONE: 50
1261  DATUM: AGD66
1262  VERTICAL DATUM:
1263  NUMBER OF REACHES:  19
1264  NUMBER OF CROSS-SECTIONS:  14206
1265END HEADER:
1266
1267
1268Only the SURFACE LINE data of the following form will be utilised
1269
1270  CROSS-SECTION:
1271    STREAM ID:Southern-Wungong
1272    REACH ID:Southern-Wungong
1273    STATION:19040.*
1274    CUT LINE:
1275      405548.671603161 , 6438142.7594925
1276      405734.536092045 , 6438326.10404912
1277      405745.130459356 , 6438331.48627354
1278      405813.89633823 , 6438368.6272789
1279    SURFACE LINE:
1280     405548.67,   6438142.76,   35.37
1281     405552.24,   6438146.28,   35.41
1282     405554.78,   6438148.78,   35.44
1283     405555.80,   6438149.79,   35.44
1284     405559.37,   6438153.31,   35.45
1285     405560.88,   6438154.81,   35.44
1286     405562.93,   6438156.83,   35.42
1287     405566.50,   6438160.35,   35.38
1288     405566.99,   6438160.83,   35.37
1289     ...
1290   END CROSS-SECTION 
1291
1292    Convert to NetCDF pts format which is
1293
1294    points:  (Nx2) Float array
1295    elevation: N Float array
1296    """
1297
1298    #FIXME: Can this be written feasibly using write_pts?
1299
1300    import os
1301    from Scientific.IO.NetCDF import NetCDFFile
1302    from Numeric import Float, zeros, reshape
1303
1304    root = basename_in
1305
1306    #Get ASCII file
1307    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1308
1309    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1310
1311    lines = infile.readlines()
1312    infile.close()
1313
1314    if verbose: print 'Converting to pts format'
1315
1316    i = 0
1317    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1318        i += 1
1319
1320    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1321    i += 1
1322   
1323    assert lines[i].strip().upper().startswith('UNITS:')
1324    units = lines[i].strip().split()[1]
1325    i += 1   
1326
1327    assert lines[i].strip().upper().startswith('DTM TYPE:')   
1328    i += 1
1329
1330    assert lines[i].strip().upper().startswith('DTM:')   
1331    i += 1           
1332
1333    assert lines[i].strip().upper().startswith('STREAM')   
1334    i += 1
1335
1336    assert lines[i].strip().upper().startswith('CROSS')   
1337    i += 1
1338
1339    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1340    projection = lines[i].strip().split(':')[1]
1341    i += 1
1342
1343    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1344    zone = int(lines[i].strip().split(':')[1])
1345    i += 1
1346
1347    assert lines[i].strip().upper().startswith('DATUM:')
1348    datum = lines[i].strip().split(':')[1]
1349    i += 1
1350
1351    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1352    i += 1
1353
1354    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:') 
1355    i += 1
1356
1357    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1358    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1359    i += 1                       
1360
1361
1362    #Now read all points
1363    points = []
1364    elevation = []
1365    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1366        for k, entry in enumerate(entries):
1367            points.append(entry[:2])
1368            elevation.append(entry[2])           
1369
1370
1371    msg = 'Actual #number_of_cross_sections == %d, Reported as %d'\
1372          %(j+1, number_of_cross_sections)         
1373    assert j+1 == number_of_cross_sections, msg
1374   
1375    #Get output file
1376    if basename_out == None:
1377        ptsname = root + '.pts'
1378    else:
1379        ptsname = basename_out + '.pts'
1380
1381    if verbose: print 'Store to NetCDF file %s' %ptsname
1382    # NetCDF file definition
1383    outfile = NetCDFFile(ptsname, 'w')
1384
1385    #Create new file
1386    outfile.institution = 'Geoscience Australia'
1387    outfile.description = 'NetCDF pts format for compact and portable ' +\
1388                          'storage of spatial point data derived from HEC-RAS'
1389   
1390    #Georeferencing
1391    outfile.zone = zone
1392    outfile.xllcorner = 0.0
1393    outfile.yllcorner = 0.0
1394    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1395    outfile.false_northing = 1000000
1396
1397    outfile.projection = projection
1398    outfile.datum = datum
1399    outfile.units = units
1400
1401
1402    outfile.createDimension('number_of_points', len(points))
1403    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1404
1405    # variable definitions
1406    outfile.createVariable('points', Float, ('number_of_points',
1407                                             'number_of_dimensions'))
1408    outfile.createVariable('elevation', Float, ('number_of_points',))
1409
1410    # Get handles to the variables
1411    outfile.variables['points'][:] = points
1412    outfile.variables['elevation'][:] = elevation
1413
1414    outfile.close()
1415
1416
1417
1418def sww2dem(basename_in, basename_out = None,
1419            quantity = None,
1420            timestep = None,
1421            reduction = None,
1422            cellsize = 10,
1423            easting_min = None,
1424            easting_max = None,
1425            northing_min = None,
1426            northing_max = None,         
1427            verbose = False,
1428            origin = None,
1429            datum = 'WGS84',
1430            format = 'ers'):
1431   
1432    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1433
1434    Example (ASC):
1435
1436    ncols         3121
1437    nrows         1800
1438    xllcorner     722000
1439    yllcorner     5893000
1440    cellsize      25
1441    NODATA_value  -9999
1442    138.3698 137.4194 136.5062 135.5558 ..........
1443
1444    Also write accompanying file with same basename_in but extension .prj
1445    used to fix the UTM zone, datum, false northings and eastings.
1446
1447    The prj format is assumed to be as
1448
1449    Projection    UTM
1450    Zone          56
1451    Datum         WGS84
1452    Zunits        NO
1453    Units         METERS
1454    Spheroid      WGS84
1455    Xshift        0.0000000000
1456    Yshift        10000000.0000000000
1457    Parameters
1458
1459
1460    The parameter quantity must be the name of an existing quantity or
1461    an expression involving existing quantities. The default is
1462    'elevation'.
1463
1464    if timestep (an index) is given, output quantity at that timestep
1465
1466    if reduction is given use that to reduce quantity over all timesteps.
1467
1468    datum
1469   
1470    format can be either 'asc' or 'ers'
1471    """
1472
1473    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1474    from utilities.polygon import inside_polygon
1475    from util import apply_expression_to_dictionary
1476   
1477    msg = 'Format must be either asc or ers'
1478    assert format.lower() in ['asc', 'ers'], msg
1479   
1480    false_easting = 500000
1481    false_northing = 10000000
1482
1483    if quantity is None:
1484        quantity = 'elevation'
1485
1486    if reduction is None:
1487        reduction = max
1488
1489    if basename_out is None:
1490        basename_out = basename_in + '_%s' %quantity
1491 
1492    swwfile = basename_in + '.sww'
1493    demfile = basename_out + '.' + format
1494    # Note the use of a .ers extension is optional (write_ermapper_grid will
1495    # deal with either option
1496
1497    #Read sww file
1498    if verbose: print 'Reading from %s' %swwfile
1499    from Scientific.IO.NetCDF import NetCDFFile
1500    fid = NetCDFFile(swwfile)
1501
1502    #Get extent and reference
1503    x = fid.variables['x'][:]
1504    y = fid.variables['y'][:]
1505    volumes = fid.variables['volumes'][:]
1506
1507    number_of_timesteps = fid.dimensions['number_of_timesteps']
1508    number_of_points = fid.dimensions['number_of_points']
1509    if origin is None:
1510
1511        #Get geo_reference
1512        #sww files don't have to have a geo_ref
1513        try:
1514            geo_reference = Geo_reference(NetCDFObject=fid)
1515        except AttributeError, e:
1516            geo_reference = Geo_reference() #Default georef object
1517
1518        xllcorner = geo_reference.get_xllcorner()
1519        yllcorner = geo_reference.get_yllcorner()
1520        zone = geo_reference.get_zone()
1521    else:
1522        zone = origin[0]
1523        xllcorner = origin[1]
1524        yllcorner = origin[2]
1525
1526
1527
1528    #Get quantity and reduce if applicable
1529    if verbose: print 'Processing quantity %s' %quantity
1530
1531    #Turn NetCDF objects into Numeric arrays
1532    quantity_dict = {}
1533    for name in fid.variables.keys():
1534        quantity_dict[name] = fid.variables[name][:]
1535
1536
1537    q = apply_expression_to_dictionary(quantity, quantity_dict)
1538
1539
1540    if len(q.shape) == 2:
1541        #q has a time component and needs to be reduced along
1542        #the temporal dimension
1543        if verbose: print 'Reducing quantity %s' %quantity
1544        q_reduced = zeros( number_of_points, Float )
1545
1546        for k in range(number_of_points):
1547            q_reduced[k] = reduction( q[:,k] )
1548
1549        q = q_reduced
1550
1551    #Post condition: Now q has dimension: number_of_points
1552    assert len(q.shape) == 1
1553    assert q.shape[0] == number_of_points   
1554   
1555
1556    #Create grid and update xll/yll corner and x,y
1557
1558    #Relative extent
1559    if easting_min is None:
1560        xmin = min(x)
1561    else:
1562        xmin = easting_min - xllcorner
1563
1564    if easting_max is None:
1565        xmax = max(x)
1566    else:
1567        xmax = easting_max - xllcorner
1568       
1569    if northing_min is None:       
1570        ymin = min(y)
1571    else:
1572        ymin = northing_min - yllcorner
1573       
1574    if northing_max is None:       
1575        ymax = max(y)
1576    else:
1577        ymax = northing_max - yllcorner
1578       
1579       
1580   
1581    if verbose: print 'Creating grid'
1582    ncols = int((xmax-xmin)/cellsize)+1
1583    nrows = int((ymax-ymin)/cellsize)+1
1584
1585
1586    #New absolute reference and coordinates
1587    newxllcorner = xmin+xllcorner
1588    newyllcorner = ymin+yllcorner
1589
1590    x = x+xllcorner-newxllcorner
1591    y = y+yllcorner-newyllcorner
1592
1593    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1594    assert len(vertex_points.shape) == 2
1595
1596
1597    from Numeric import zeros, Float
1598    grid_points = zeros ( (ncols*nrows, 2), Float )
1599
1600
1601    for i in xrange(nrows):
1602        if format.lower() == 'asc':
1603            yg = i*cellsize
1604        else:   
1605            #this will flip the order of the y values for ers
1606            yg = (nrows-i)*cellsize
1607           
1608        for j in xrange(ncols):
1609            xg = j*cellsize
1610            k = i*ncols + j
1611
1612            grid_points[k,0] = xg
1613            grid_points[k,1] = yg
1614
1615    #Interpolate
1616    from least_squares import Interpolation
1617   
1618
1619    #FIXME: This should be done with precrop = True (?), otherwise it'll
1620    #take forever. With expand_search  set to False, some grid points might
1621    #miss out.... This will be addressed though Duncan's refactoring of least_squares
1622
1623    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
1624                           precrop = False, expand_search = True,
1625                           verbose = verbose)
1626
1627    #Interpolate using quantity values
1628    if verbose: print 'Interpolating'
1629    grid_values = interp.interpolate(q).flat
1630
1631    if format.lower() == 'ers':
1632        # setup ERS header information
1633        grid_values = reshape(grid_values,(nrows, ncols))   
1634        NODATA_value = 0
1635        header = {}
1636        header['datum'] = '"' + datum + '"'
1637        # FIXME The use of hardwired UTM and zone number needs to be made optional
1638        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
1639        header['projection'] = '"UTM-' + str(zone) + '"'
1640        header['coordinatetype'] = 'EN'
1641        if header['coordinatetype'] == 'LL':
1642            header['longitude'] = str(newxllcorner)
1643            header['latitude'] = str(newyllcorner)
1644        elif header['coordinatetype'] == 'EN':
1645            header['eastings'] = str(newxllcorner)
1646            header['northings'] = str(newyllcorner)
1647        header['nullcellvalue'] = str(NODATA_value)
1648        header['xdimension'] = str(cellsize)
1649        header['ydimension'] = str(cellsize)
1650        header['value'] = '"' + quantity + '"'
1651
1652
1653        #Write
1654        if verbose: print 'Writing %s' %demfile
1655        import ermapper_grids
1656        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
1657
1658        fid.close()   
1659    else:
1660        #Write to Ascii format
1661       
1662        #Write prj file
1663        prjfile = basename_out + '.prj' 
1664       
1665        if verbose: print 'Writing %s' %prjfile
1666        prjid = open(prjfile, 'w')
1667        prjid.write('Projection    %s\n' %'UTM')
1668        prjid.write('Zone          %d\n' %zone)
1669        prjid.write('Datum         %s\n' %datum)
1670        prjid.write('Zunits        NO\n')
1671        prjid.write('Units         METERS\n')
1672        prjid.write('Spheroid      %s\n' %datum)
1673        prjid.write('Xshift        %d\n' %false_easting)
1674        prjid.write('Yshift        %d\n' %false_northing)
1675        prjid.write('Parameters\n')
1676        prjid.close()
1677
1678
1679   
1680        if verbose: print 'Writing %s' %ascfile
1681        NODATA_value = -9999
1682 
1683        ascid = open(demfile, 'w')
1684
1685        ascid.write('ncols         %d\n' %ncols)
1686        ascid.write('nrows         %d\n' %nrows)
1687        ascid.write('xllcorner     %d\n' %newxllcorner)
1688        ascid.write('yllcorner     %d\n' %newyllcorner)
1689        ascid.write('cellsize      %f\n' %cellsize)
1690        ascid.write('NODATA_value  %d\n' %NODATA_value)
1691
1692
1693        #Get bounding polygon from mesh
1694        P = interp.mesh.get_boundary_polygon()
1695        inside_indices = inside_polygon(grid_points, P)
1696
1697        for i in range(nrows):
1698            if verbose and i%((nrows+10)/10)==0:
1699                print 'Doing row %d of %d' %(i, nrows)
1700
1701            for j in range(ncols):
1702                index = (nrows-i-1)*ncols+j
1703
1704                if sometrue(inside_indices == index):
1705                    ascid.write('%f ' %grid_values[index])
1706                else:
1707                    ascid.write('%d ' %NODATA_value)
1708
1709            ascid.write('\n')
1710
1711        #Close
1712        ascid.close()
1713        fid.close()
1714
1715#Backwards compatibility       
1716def sww2asc(basename_in, basename_out = None,
1717            quantity = None,
1718            timestep = None,
1719            reduction = None,
1720            cellsize = 10,
1721            verbose = False,
1722            origin = None):
1723    print 'sww2asc will soon be obsoleted - please use sww2dem'
1724    sww2dem(basename_in,
1725            basename_out = basename_out,
1726            quantity = quantity,
1727            timestep = timestep,
1728            reduction = reduction,
1729            cellsize = cellsize,
1730            verbose = verbose,
1731            origin = origin,
1732            datum = 'WGS84',
1733            format = 'asc')
1734           
1735def sww2ers(basename_in, basename_out = None,
1736            quantity = None,
1737            timestep = None,
1738            reduction = None,
1739            cellsize = 10,
1740            verbose = False,
1741            origin = None,
1742            datum = 'WGS84'):
1743    print 'sww2ers will soon be obsoleted - please use sww2dem'
1744    sww2dem(basename_in, 
1745            basename_out = basename_out,
1746            quantity = quantity,
1747            timestep = timestep,
1748            reduction = reduction,
1749            cellsize = cellsize,
1750            verbose = verbose,
1751            origin = origin,
1752            datum = datum,
1753            format = 'ers')         
1754################################# END COMPATIBILITY ##############         
1755
1756
1757
1758
def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
                                  verbose=False):
    """Read Digitial Elevation model from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert basename_in + '.asc' to NetCDF format (.dem)
    mimicking the ASCII format closely.


    An accompanying file with same basename_in but extension .prj must exist
    and is used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters

    basename_in: name of input files (without .asc/.prj extensions)
    basename_out: name of output file (without the .dem extension).
                  Defaults to basename_in.
    verbose: if True, print progress information
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, array

    #root, ext = os.path.splitext(basename_in)
    root = basename_in

    ###########################################
    # Read Meta data
    #Note: '%' binds tighter than '+' here, but because %s is at the end of
    #the format string the result is still the intended message
    if verbose: print 'Reading METADATA from %s' %root + '.prj'
    metadatafile = open(root + '.prj')
    metalines = metadatafile.readlines()
    metadatafile.close()

    #The .prj lines are expected in this exact order;
    #each assert pins one line of the format
    L = metalines[0].strip().split()
    assert L[0].strip().lower() == 'projection'
    projection = L[1].strip()                   #TEXT

    L = metalines[1].strip().split()
    assert L[0].strip().lower() == 'zone'
    zone = int(L[1].strip())

    L = metalines[2].strip().split()
    assert L[0].strip().lower() == 'datum'
    datum = L[1].strip()                        #TEXT

    L = metalines[3].strip().split()
    assert L[0].strip().lower() == 'zunits'     #IGNORE
    zunits = L[1].strip()                       #TEXT

    L = metalines[4].strip().split()
    assert L[0].strip().lower() == 'units'
    units = L[1].strip()                        #TEXT

    L = metalines[5].strip().split()
    assert L[0].strip().lower() == 'spheroid'   #IGNORE
    spheroid = L[1].strip()                     #TEXT

    L = metalines[6].strip().split()
    assert L[0].strip().lower() == 'xshift'
    false_easting = float(L[1].strip())

    L = metalines[7].strip().split()
    assert L[0].strip().lower() == 'yshift'
    false_northing = float(L[1].strip())

    #print false_easting, false_northing, zone, datum


    ###########################################
    #Read DEM data

    datafile = open(basename_in + '.asc')

    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), ' lines'

    #Header: six 'keyword value' lines followed by nrows data rows
    ncols = int(lines[0].split()[1].strip())
    nrows = int(lines[1].split()[1].strip())
    xllcorner = float(lines[2].split()[1].strip())
    yllcorner = float(lines[3].split()[1].strip())
    cellsize = float(lines[4].split()[1].strip())
    NODATA_value = int(lines[5].split()[1].strip())

    #NOTE(review): a trailing blank line in the .asc file will trip this
    assert len(lines) == nrows + 6


    ##########################################


    if basename_out == None:
        netcdfname = root + '.dem'
    else:
        netcdfname = basename_out + '.dem'

    if verbose: print 'Store to NetCDF file %s' %netcdfname
    # NetCDF file definition
    fid = NetCDFFile(netcdfname, 'w')

    #Create new file
    fid.institution = 'Geoscience Australia'
    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
                      'of spatial point data'

    #Copy grid layout straight through as global attributes
    fid.ncols = ncols
    fid.nrows = nrows
    fid.xllcorner = xllcorner
    fid.yllcorner = yllcorner
    fid.cellsize = cellsize
    fid.NODATA_value = NODATA_value

    #Georeferencing from the .prj file
    fid.zone = zone
    fid.false_easting = false_easting
    fid.false_northing = false_northing
    fid.projection = projection
    fid.datum = datum
    fid.units = units


    # dimension definitions
    fid.createDimension('number_of_rows', nrows)
    fid.createDimension('number_of_columns', ncols)

    # variable definitions
    fid.createVariable('elevation', Float, ('number_of_rows',
                                            'number_of_columns'))

    # Get handles to the variables
    elevation = fid.variables['elevation']

    #Store data, one grid row per .asc data line
    n = len(lines[6:])
    for i, line in enumerate(lines[6:]):
        fields = line.split()
        #Progress report roughly every 10% of rows
        if verbose and i%((n+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)           

        elevation[i, :] = array([float(x) for x in fields])

    fid.close()
1916
1917
1918
1919
1920
1921def ferret2sww(basename_in, basename_out = None,
1922               verbose = False,
1923               minlat = None, maxlat = None,
1924               minlon = None, maxlon = None,
1925               mint = None, maxt = None, mean_stage = 0,
1926               origin = None, zscale = 1,
1927               fail_on_NaN = True,
1928               NaN_filler = 0,
1929               elevation = None,
1930               inverted_bathymetry = False
1931               ): #FIXME: Bathymetry should be obtained
1932                                  #from MOST somehow.
1933                                  #Alternatively from elsewhere
1934                                  #or, as a last resort,
1935                                  #specified here.
1936                                  #The value of -100 will work
1937                                  #for the Wollongong tsunami
1938                                  #scenario but is very hacky
1939    """Convert 'Ferret' NetCDF format for wave propagation to
1940    sww format native to pyvolution.
1941
1942    Specify only basename_in and read files of the form
1943    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
1944    relative height, x-velocity and y-velocity, respectively.
1945
1946    Also convert latitude and longitude to UTM. All coordinates are
1947    assumed to be given in the GDA94 datum.
1948
1949    min's and max's: If omitted - full extend is used.
1950    To include a value min may equal it, while max must exceed it.
1951    Lat and lon are assuemd to be in decimal degrees
1952
1953    origin is a 3-tuple with geo referenced
1954    UTM coordinates (zone, easting, northing)
1955
1956    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
1957    which means that longitude is the fastest
1958    varying dimension (row major order, so to speak)
1959
1960    ferret2sww uses grid points as vertices in a triangular grid
1961    counting vertices from lower left corner upwards, then right
1962    """
1963
1964    import os
1965    from Scientific.IO.NetCDF import NetCDFFile
1966    from Numeric import Float, Int, Int32, searchsorted, zeros, array
1967    from Numeric import allclose, around
1968       
1969    precision = Float
1970
1971
1972    #Get NetCDF data
1973    if verbose: print 'Reading files %s_*.nc' %basename_in
1974    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
1975    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
1976    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
1977    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
1978
1979    if basename_out is None:
1980        swwname = basename_in + '.sww'
1981    else:
1982        swwname = basename_out + '.sww'
1983
1984    times = file_h.variables['TIME']
1985    latitudes = file_h.variables['LAT']
1986    longitudes = file_h.variables['LON']
1987
1988
1989
1990    #Precision used by most for lat/lon is 4 or 5 decimals
1991    e_lat = around(file_e.variables['LAT'][:], 5)
1992    e_lon = around(file_e.variables['LON'][:], 5)
1993
1994    #Check that files are compatible
1995    assert allclose(latitudes, file_u.variables['LAT'])
1996    assert allclose(latitudes, file_v.variables['LAT'])
1997    assert allclose(latitudes, e_lat)
1998
1999    assert allclose(longitudes, file_u.variables['LON'])
2000    assert allclose(longitudes, file_v.variables['LON'])
2001    assert allclose(longitudes, e_lon)   
2002
2003
2004
2005    if mint == None:
2006        jmin = 0
2007    else:
2008        jmin = searchsorted(times, mint)
2009
2010    if maxt == None:
2011        jmax=len(times)
2012    else:
2013        jmax = searchsorted(times, maxt)
2014
2015    if minlat == None:
2016        kmin=0
2017    else:
2018        kmin = searchsorted(latitudes, minlat)
2019
2020    if maxlat == None:
2021        kmax = len(latitudes)
2022    else:
2023        kmax = searchsorted(latitudes, maxlat)
2024
2025    if minlon == None:
2026        lmin=0
2027    else:
2028        lmin = searchsorted(longitudes, minlon)
2029
2030    if maxlon == None:
2031        lmax = len(longitudes)
2032    else:
2033        lmax = searchsorted(longitudes, maxlon)
2034
2035
2036
2037    times = times[jmin:jmax]
2038    latitudes = latitudes[kmin:kmax]
2039    longitudes = longitudes[lmin:lmax]
2040
2041
2042    if verbose: print 'cropping'
2043    zname = 'ELEVATION'
2044
2045   
2046    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2047    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2048    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2049    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2050
2051#    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2052#        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2053#    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2054#        from Numeric import asarray
2055#        elevations=elevations.tolist()
2056#        elevations.reverse()
2057#        elevations=asarray(elevations)
2058#    else:
2059#        from Numeric import asarray
2060#        elevations=elevations.tolist()
2061#        elevations.reverse()
2062#        elevations=asarray(elevations)
2063#        'print hmmm'
2064
2065
2066
2067    #Get missing values
2068    nan_ha = file_h.variables['HA'].missing_value[0]
2069    nan_ua = file_u.variables['UA'].missing_value[0]
2070    nan_va = file_v.variables['VA'].missing_value[0]
2071    if hasattr(file_e.variables[zname],'missing_value'):
2072        nan_e  = file_e.variables[zname].missing_value[0]
2073    else:
2074        nan_e = None
2075
2076    #Cleanup
2077    from Numeric import sometrue
2078
2079    missing = (amplitudes == nan_ha)
2080    if sometrue (missing):
2081        if fail_on_NaN:
2082            msg = 'NetCDFFile %s contains missing values'\
2083                  %(basename_in+'_ha.nc')
2084            raise msg
2085        else:
2086            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2087
2088    missing = (uspeed == nan_ua)
2089    if sometrue (missing):
2090        if fail_on_NaN:
2091            msg = 'NetCDFFile %s contains missing values'\
2092                  %(basename_in+'_ua.nc')
2093            raise msg
2094        else:
2095            uspeed = uspeed*(missing==0) + missing*NaN_filler
2096
2097    missing = (vspeed == nan_va)
2098    if sometrue (missing):
2099        if fail_on_NaN:
2100            msg = 'NetCDFFile %s contains missing values'\
2101                  %(basename_in+'_va.nc')
2102            raise msg
2103        else:
2104            vspeed = vspeed*(missing==0) + missing*NaN_filler
2105
2106
2107    missing = (elevations == nan_e)
2108    if sometrue (missing):
2109        if fail_on_NaN:
2110            msg = 'NetCDFFile %s contains missing values'\
2111                  %(basename_in+'_e.nc')
2112            raise msg
2113        else:
2114            elevations = elevations*(missing==0) + missing*NaN_filler
2115
2116    #######
2117
2118
2119
2120    number_of_times = times.shape[0]
2121    number_of_latitudes = latitudes.shape[0]
2122    number_of_longitudes = longitudes.shape[0]
2123
2124    assert amplitudes.shape[0] == number_of_times
2125    assert amplitudes.shape[1] == number_of_latitudes
2126    assert amplitudes.shape[2] == number_of_longitudes
2127
2128    if verbose:
2129        print '------------------------------------------------'
2130        print 'Statistics:'
2131        print '  Extent (lat/lon):'
2132        print '    lat in [%f, %f], len(lat) == %d'\
2133              %(min(latitudes.flat), max(latitudes.flat),
2134                len(latitudes.flat))
2135        print '    lon in [%f, %f], len(lon) == %d'\
2136              %(min(longitudes.flat), max(longitudes.flat),
2137                len(longitudes.flat))
2138        print '    t in [%f, %f], len(t) == %d'\
2139              %(min(times.flat), max(times.flat), len(times.flat))
2140
2141        q = amplitudes.flat
2142        name = 'Amplitudes (ha) [cm]'
2143        print %s in [%f, %f]' %(name, min(q), max(q))
2144
2145        q = uspeed.flat
2146        name = 'Speeds (ua) [cm/s]'
2147        print %s in [%f, %f]' %(name, min(q), max(q))
2148
2149        q = vspeed.flat
2150        name = 'Speeds (va) [cm/s]'
2151        print %s in [%f, %f]' %(name, min(q), max(q))
2152
2153        q = elevations.flat
2154        name = 'Elevations (e) [m]'
2155        print %s in [%f, %f]' %(name, min(q), max(q))
2156
2157
2158    #print number_of_latitudes, number_of_longitudes
2159    number_of_points = number_of_latitudes*number_of_longitudes
2160    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2161
2162
2163    file_h.close()
2164    file_u.close()
2165    file_v.close()
2166    file_e.close()
2167
2168
2169    # NetCDF file definition
2170    outfile = NetCDFFile(swwname, 'w')
2171
2172    #Create new file
2173    outfile.institution = 'Geoscience Australia'
2174    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2175                          %(basename_in + '_ha.nc',
2176                            basename_in + '_ua.nc',
2177                            basename_in + '_va.nc',
2178                            basename_in + '_e.nc')
2179
2180
2181    #For sww compatibility
2182    outfile.smoothing = 'Yes'
2183    outfile.order = 1
2184
2185    #Start time in seconds since the epoch (midnight 1/1/1970)
2186    outfile.starttime = starttime = times[0]
2187    times = times - starttime  #Store relative times
2188
2189    # dimension definitions
2190    outfile.createDimension('number_of_volumes', number_of_volumes)
2191
2192    outfile.createDimension('number_of_vertices', 3)
2193    outfile.createDimension('number_of_points', number_of_points)
2194
2195
2196    #outfile.createDimension('number_of_timesteps', len(times))
2197    outfile.createDimension('number_of_timesteps', len(times))
2198
2199    # variable definitions
2200    outfile.createVariable('x', precision, ('number_of_points',))
2201    outfile.createVariable('y', precision, ('number_of_points',))
2202    outfile.createVariable('elevation', precision, ('number_of_points',))
2203
2204    #FIXME: Backwards compatibility
2205    outfile.createVariable('z', precision, ('number_of_points',))
2206    #################################
2207
2208    outfile.createVariable('volumes', Int, ('number_of_volumes',
2209                                            'number_of_vertices'))
2210
2211    outfile.createVariable('time', precision,
2212                           ('number_of_timesteps',))
2213
2214    outfile.createVariable('stage', precision,
2215                           ('number_of_timesteps',
2216                            'number_of_points'))
2217
2218    outfile.createVariable('xmomentum', precision,
2219                           ('number_of_timesteps',
2220                            'number_of_points'))
2221
2222    outfile.createVariable('ymomentum', precision,
2223                           ('number_of_timesteps',
2224                            'number_of_points'))
2225
2226
2227    #Store
2228    from coordinate_transforms.redfearn import redfearn
2229    x = zeros(number_of_points, Float)  #Easting
2230    y = zeros(number_of_points, Float)  #Northing
2231
2232
2233    if verbose: print 'Making triangular grid'
2234    #Check zone boundaries
2235    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2236
2237    vertices = {}
2238    i = 0
2239    for k, lat in enumerate(latitudes):       #Y direction
2240        for l, lon in enumerate(longitudes):  #X direction
2241
2242            vertices[l,k] = i
2243
2244            zone, easting, northing = redfearn(lat,lon)
2245
2246            msg = 'Zone boundary crossed at longitude =', lon
2247            #assert zone == refzone, msg
2248            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2249            x[i] = easting
2250            y[i] = northing
2251            i += 1
2252
2253
2254    #Construct 2 triangles per 'rectangular' element
2255    volumes = []
2256    for l in range(number_of_longitudes-1):    #X direction
2257        for k in range(number_of_latitudes-1): #Y direction
2258            v1 = vertices[l,k+1]
2259            v2 = vertices[l,k]
2260            v3 = vertices[l+1,k+1]
2261            v4 = vertices[l+1,k]
2262
2263            volumes.append([v1,v2,v3]) #Upper element
2264            volumes.append([v4,v3,v2]) #Lower element
2265
2266    volumes = array(volumes)
2267
2268    if origin == None:
2269        zone = refzone
2270        xllcorner = min(x)
2271        yllcorner = min(y)
2272    else:
2273        zone = origin[0]
2274        xllcorner = origin[1]
2275        yllcorner = origin[2]
2276
2277
2278    outfile.xllcorner = xllcorner
2279    outfile.yllcorner = yllcorner
2280    outfile.zone = zone
2281
2282
2283    if elevation is not None:
2284        z = elevation
2285    else:
2286        if inverted_bathymetry:
2287            z = -1*elevations
2288        else:
2289            z = elevations
2290        #FIXME: z should be obtained from MOST and passed in here
2291
2292    from Numeric import resize
2293    z = resize(z,outfile.variables['z'][:].shape)
2294    outfile.variables['x'][:] = x - xllcorner
2295    outfile.variables['y'][:] = y - yllcorner
2296    outfile.variables['z'][:] = z
2297    outfile.variables['elevation'][:] = z  #FIXME HACK
2298    outfile.variables['time'][:] = times   #Store time relative
2299    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2300
2301
2302
2303    #Time stepping
2304    stage = outfile.variables['stage']
2305    xmomentum = outfile.variables['xmomentum']
2306    ymomentum = outfile.variables['ymomentum']
2307
2308    if verbose: print 'Converting quantities'
2309    n = len(times)
2310    for j in range(n):
2311        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2312        i = 0
2313        for k in range(number_of_latitudes):      #Y direction
2314            for l in range(number_of_longitudes): #X direction
2315                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2316                stage[j,i] = w
2317                h = w - z[i]
2318                xmomentum[j,i] = uspeed[j,k,l]/100*h
2319                ymomentum[j,i] = vspeed[j,k,l]/100*h
2320                i += 1
2321
2322
2323    if verbose:
2324        x = outfile.variables['x'][:]
2325        y = outfile.variables['y'][:]
2326        print '------------------------------------------------'
2327        print 'Statistics of output file:'
2328        print '  Name: %s' %swwname
2329        print '  Reference:'
2330        print '    Lower left corner: [%f, %f]'\
2331              %(xllcorner, yllcorner)
2332        print '    Start time: %f' %starttime
2333        print '  Extent:'
2334        print '    x [m] in [%f, %f], len(x) == %d'\
2335              %(min(x.flat), max(x.flat), len(x.flat))
2336        print '    y [m] in [%f, %f], len(y) == %d'\
2337              %(min(y.flat), max(y.flat), len(y.flat))
2338        print '    t [s] in [%f, %f], len(t) == %d'\
2339              %(min(times), max(times), len(times))
2340        print '  Quantities [SI units]:'
2341        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2342            q = outfile.variables[name][:].flat
2343            print '    %s in [%f, %f]' %(name, min(q), max(q))
2344
2345
2346
2347
2348    outfile.close()
2349
2350
2351
2352
def timefile2netcdf(filename, quantity_names = None):
    """Template for converting typical text files with time series to
    NetCDF tms file.


    The file format is assumed to be either two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname with extenisons .txt and .sww

    quantity_names, if given, supplies the variable names for the value
    columns; otherwise names Attribute0, Attribute1, ... are used.
    """

    import time, calendar
    from Numeric import array, zeros, Float, alltrue
    from config import time_format
    from util import ensure_numeric
    from Scientific.IO.NetCDF import NetCDFFile


    #Read first line to establish start time and number of attributes
    fid = open(filename + '.txt')
    line = fid.readline()
    fid.close()

    fields = line.split(',')
    #FIX: filename was previously never substituted into the message
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg

    try:
        starttime = calendar.timegm(time.strptime(fields[0], time_format))
    except ValueError:
        msg = 'First field in file %s must be' %filename
        msg += ' date-time with format %s.\n' %time_format
        msg += 'I got %s instead.' %fields[0]
        raise msg


    #Split values
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg


    #Read times proper: one row per line, times stored relative to start
    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    N = len(lines)
    d = len(q)

    T = zeros(N, Float)       #Time
    Q = zeros((N, d), Float)  #Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        realtime = calendar.timegm(time.strptime(fields[0], time_format))

        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonuosly ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg


    #Create NetCDF file
    fid = NetCDFFile(filename + '.tms', 'w')


    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'


    #Reference point
    #Start time in seconds since the epoch (midnight 1/1/1970)
    #FIXME: Use Georef
    fid.starttime = starttime


    # dimension definitions
    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', Float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError, KeyError):
            #quantity_names is None or has no entry for column i
            name = 'Attribute%d'%i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()
2473
2474
def extent_sww(file_name):
    """
    Read in an sww file and return its extent.

    Input;
    file_name - the sww file

    Output;
    A list [xmin, xmax, ymin, ymax, stage_min, stage_max] giving the
    spatial extent of the mesh and the range of the stored stage values
    over all timesteps and vertices.
    """

    from Scientific.IO.NetCDF import NetCDFFile

    #Open NetCDF file for reading
    fid = NetCDFFile(file_name, 'r')

    #Fetch coordinates and the full stage history
    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stages = fid.variables['stage'][:]

    fid.close()

    return [min(xs), max(xs),
            min(ys), max(ys),
            min(stages.flat), max(stages.flat)]
2509
2510
def sww2domain(filename,boundary=None,t=None,\
               fail_if_NaN=True,NaN_filler=0\
               ,verbose = True,very_verbose = False):
    """Reconstruct a shallow_water Domain from an sww file.

    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    Conserved (time dependent) quantities are interpolated to time t;
    time independent quantities are copied as stored.  If fail_if_NaN is
    False, 'no data' entries are replaced by NaN_filler instead of
    raising.

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.
    """
    #Default 'no data' marker, overridden per quantity below when the
    #variable carries its own missing_value attribute
    NaN=9.969209968386869e+036
    #initialise NaN.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
    time = fid.variables['time']       #Timesteps
    if t is None:
        t = time[-1]
    #Index and ratio used to interpolate quantities at time t
    time_interp = get_time_interp(time,t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]             #x-coordinates of vertices
    y = fid.variables['y'][:]             #y-coordinates of vertices
    elevation = fid.variables['elevation']     #Elevation
    stage = fid.variables['stage']     #Water level
    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    coordinates=transpose(asarray([x.tolist(),y.tolist()]))

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    if verbose: print '    getting quantities'
    #Variables with a time dimension become conserved quantities
    #(interpolated to t); the rest become 'other' quantities
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity]=\
                  interpolated_quantity(fid.variables[quantity][:],time_interp)
        else: other_quantities.append(quantity)

    #Coordinates, connectivity and the redundant 'z' variable are
    #structural data, not quantities
    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
#    From domain.Domain:
#    domain = Domain(coordinates, volumes,\
#                    conserved_quantities = conserved_quantities,\
#                    other_quantities = other_quantities,zone=zone,\
#                    xllcorner=xllcorner, yllcorner=yllcorner)

#   From shallow_water.Domain:
    coordinates=coordinates.tolist()
    volumes=volumes.tolist()
    #FIXME:should this be in mesh?(peter row)
    #Smoothed files share vertices; unsmoothed files duplicate them per
    #triangle and must be weeded before building the domain
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)


    domain = Domain(coordinates, volumes, boundary)

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    #Model time restarts at the file's start time plus the offset t
    domain.starttime=float(starttime)+float(t)
    domain.time=0.0

    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        #Reject or fill 'no data' entries depending on fail_if_NaN
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Reshape flat vertex values to one row of 3 per triangle
            X = resize(X,(len(X)/3,3))
        domain.set_quantity(quantity,X)
#
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        #Same NaN handling as for the time independent quantities above
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            X = resize(X,(X.shape[0]/3,3))
        domain.set_quantity(quantity,X)
    fid.close()
    return domain
2653
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a stored quantity in time.

    time_interp is an (index, ratio) pair as produced by
    get_time_interp.  Returns the stored value at index when ratio is
    zero, otherwise the linear blend of the values at index and index+1.
    """
    index, ratio = time_interp
    if ratio > 0:
        #Linear interpolation between the two neighbouring timesteps
        return (1 - ratio)*saved_quantity[index] + ratio*saved_quantity[index + 1]
    #Exact timestep - no interpolation needed
    return saved_quantity[index]
2665
def get_time_interp(time, t=None):
    """Find the (index, ratio) pair for interpolating at time t.

    If t is omitted the last stored time is selected exactly:
    (-1, 0.).  Otherwise index satisfies time[index] <= t and ratio is
    t's fractional position between time[index] and time[index+1].
    It is borrowed from previous pyvolution code.
    """
    if t is None:
        #Default to last timestep, no interpolation
        return (-1, 0.)

    T = time
    tau = t
    msg = 'Time interval derived from file %s [%s:%s]'\
        %('FIXMEfilename', T[0], T[-1])
    msg += ' does not match model time: %s' %tau

    #Reject times outside the stored interval
    if tau < time[0]: raise msg
    if tau > time[-1]: raise msg

    #Walk to the enclosing interval
    index = 0
    while tau > time[index]: index += 1
    while tau < time[index]: index -= 1

    if tau == time[index]:
        #Protect against case where tau == time[-1] (last time)
        # - also works in general when tau == time[i]
        ratio = 0
    else:
        #t is now between index and index+1
        ratio = (tau - time[index])/(time[index+1] - time[index])

    return (index, ratio)
2692
2693
def weed(coordinates,volumes,boundary = None):
    """Remove duplicate points from a mesh description.

    coordinates and volumes may be lists or Numeric arrays.  Duplicate
    coordinates are merged, the vertex indices in volumes are remapped
    to the weeded numbering and, if a boundary dictionary mapping
    (index0, index1) segments to labels is given, its endpoints are
    remapped as well.

    Returns (coordinates, volumes, boundary).
    """
    #FIX: the original guards compared type(x) to the *string* 'array',
    #which is always false; duck-type on tolist instead so Numeric
    #arrays are actually converted
    if hasattr(coordinates, 'tolist'):
        coordinates = coordinates.tolist()
    if hasattr(volumes, 'tolist'):
        volumes = volumes.tolist()

    #Remember first occurrence of each coordinate and, for every
    #original index, which coordinate it refers to
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point not in point_dict:
            point_dict[point] = i
        same_point[i] = point
        #to change all point i references to point j

    #Rebuild coordinate list without duplicates and renumber densely
    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    #Remap volume vertex indices to the weeded numbering
    #(negative indices are sentinels and left untouched)
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    new_boundary = {}
    if not boundary is None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)

            if (point0, point1) in new_boundary:
                new_boundary[(point0,point1)] = new_boundary[(point0,point1)]#\
                                              #+','+label

            elif (point1, point0) in new_boundary:
                new_boundary[(point1,point0)] = new_boundary[(point1,point0)]#\
                                              #+','+label
            else: new_boundary[(point0,point1)] = label

        boundary = new_boundary

    return coordinates,volumes,boundary
2749
2750
2751
2752
2753
2754
2755
2756
2757
2758
2759###############################################
2760#OBSOLETE STUFF
2761#Native checkpoint format.
2762#Information needed to recreate a state is preserved
2763#FIXME: Rethink and maybe use netcdf format
def cpt_variable_writer(filename, t, v0, v1, v2):
    """Store all conserved quantities to file

    Writes one line per row i: the N values of v0, then v1, then v2,
    each formatted as '%.16e '.
    """

    M, N = v0.shape

    outname = create_filename(filename, 'cpt', M, t)
    #print 'Writing to %s' %outname

    fid = open(outname, 'w')
    for i in range(M):
        #Emit the three quantity blocks in order on a single line
        for quantity in (v0, v1, v2):
            for j in range(N):
                fid.write('%.16e ' %quantity[i,j])

        fid.write('\n')
    fid.close()
2784
2785
def cpt_variable_reader(filename, t, v0, v1, v2):
    """Read all conserved quantities back from a cpt checkpoint file.

    Counterpart of cpt_variable_writer: each line holds N values for
    v0, then N for v1, then N for v2.  The M x N arrays v0, v1 and v2
    are filled in place.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Reading from %s' %FN

    fid = open(FN)


    for i in range(M):
        values = fid.readline().split() #Get one line

        for j in range(N):
            #FIX: offsets were hardcoded as 3 and 6, which only matches
            #the writer's layout when N == 3; use N-based offsets
            v0[i,j] = float(values[j])
            v1[i,j] = float(values[N+j])
            v2[i,j] = float(values[2*N+j])

    fid.close()
2807
def cpt_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z,z,z coordinates of triangles constituting the bed
    elevation.
    FIXME: Not in use pt
    """

    M, N = v0.shape

    #FIX: removed leftover debug code ('print X0; import sys;
    #sys.exit()') which unconditionally terminated the interpreter
    #before anything was written
    FN = create_filename(filename, 'cpt', M)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        for j in range(2):
            fid.write('%.16e ' %X0[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v0[i,j])       #z,z,z,

        for j in range(2):
            fid.write('%.16e ' %X1[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v1[i,j])

        for j in range(2):
            fid.write('%.16e ' %X2[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v2[i,j])

        fid.write('\n')
    fid.close()
2841
2842
2843
2844#Function for storing out to e.g. visualisation
2845#FIXME: Do we want this?
2846#FIXME: Not done yet for this version
def dat_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z coordinates of triangles constituting the bed elevation.

    For each row i, emits x, y (from X0, X1, X2) followed by the first
    value of the corresponding v array, all formatted '%f '.
    """

    M, N = v0.shape

    outname = create_filename(filename, 'dat', M)
    #print 'Writing to %s' %outname

    fid = open(outname, 'w')
    for i in range(M):
        #One (x, y, z) triple per vertex of the triangle
        for X, v in ((X0, v0), (X1, v1), (X2, v2)):
            fid.write('%f ' %X[i,0])   #x
            fid.write('%f ' %X[i,1])   #y
            fid.write('%f ' %v[i,0])   #z

        fid.write('\n')
    fid.close()
2872
2873
2874
def dat_variable_writer(filename, t, v0, v1, v2):
    """Store water height to file

    Writes one line per row i with the first column of v0, v1 and v2
    formatted as '%.4f '.
    """

    M, N = v0.shape

    outname = create_filename(filename, 'dat', M, t)
    #print 'Writing to %s' %outname

    fid = open(outname, 'w')
    for i in range(M):
        for quantity in (v0, v1, v2):
            fid.write('%.4f ' %quantity[i,0])

        fid.write('\n')
    fid.close()
2892
2893
def read_sww(filename):
    """Read sww Net CDF file containing Shallow Water Wave simulation

    The integer array volumes is of shape Nx3 where N is the number of
    triangles in the mesh.

    Each entry in volumes is an index into the x,y arrays (the location).

    Quantities stage, elevation, xmomentum and ymomentum are all in arrays of dimensions
    number_of_timesteps, number_of_points.

    The momentum is not always stored.

    NOTE(review): this function binds the NetCDF variables to locals but
    never returns anything and never closes the file - it appears to be
    unfinished.  Callers currently receive None.
    """
    from Scientific.IO.NetCDF import NetCDFFile
    print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
#latitude, longitude
    # Get the variables as Numeric arrays
    x = fid.variables['x']             #x-coordinates of vertices
    y = fid.variables['y']             #y-coordinates of vertices
    z = fid.variables['elevation']     #Elevation
    time = fid.variables['time']       #Timesteps
    stage = fid.variables['stage']     #Water level
    #xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    #ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    volumes = fid.variables['volumes'] #Connectivity
2922
2923
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digital Elevation Model from NetCDF format (.dem) and decimate it.

    The data corresponds to the ASCII example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.

    basename_in: stem of input file; '<basename_in>.dem' is read.
    stencil: 2D array of weights applied to each window of the input grid.
    cellsize_new: cell size of the decimated grid; assumed to be an
                  integer multiple of the input cellsize.
    basename_out: stem of output file; defaults to
                  '<basename_in>_<cellsize_new>.dem'.

    Output cells whose input window contains one or more NODATA_value
    entries are themselves set to NODATA_value; all other cells are the
    stencil-weighted sum of their window.
    """

    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print('Reading DEM from %s' % inname)

    #Read metadata (scalar attributes are stored as one-element arrays)
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out is None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print('Write decimated NetCDF file to %s' % outname)

    #Determine some dimensions for decimated grid.
    #Floor division (//) keeps Python 2 behaviour and is also correct
    #under Python 3, where / on ints yields a float.
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil // 2
    y_offset = nrows_stencil // 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) // cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) // cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                           'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    #Lower-left corner moves in by the half-width of the stencil
    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data, one decimated row at a time
    global_index = 0
    for i in range(nrows_new):
        if verbose: print('Processing row %d of %d' % (i, nrows_new))
        lower_index = global_index
        telev = zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
3050
3051
3052   
def sww2asc_obsolete(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            verbose = False,
            origin = None):
    """Read SWW file and convert to Digital Elevation model format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    if quantity is given, output values from quantity otherwise default to
    elevation

    if timestep (an index) is given, output quantity at that timestep

    if reduction is given use that to reduce quantity over all timesteps.

    NOTE(review): the timestep argument is accepted but never used in the
    body below.
    """
    from Numeric import array, Float, concatenate, NewAxis, zeros,\
         sometrue
    from utilities.polygon import inside_polygon

    #FIXME: Should be variable
    datum = 'WGS84'
    false_easting = 500000
    false_northing = 10000000

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    ascfile = basename_out + '.asc'
    prjfile = basename_out + '.prj'


    if verbose: print 'Reading from %s' %swwfile
    #Read sww file
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    ymin = min(y); ymax = max(y)
    xmin = min(x); xmax = max(x)

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        #Caller supplied georeference as (zone, xllcorner, yllcorner)
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]


    #Get quantity and reduce if applicable
    if verbose: print 'Reading quantity %s' %quantity

    #'depth' is a derived quantity: stage minus elevation
    if quantity.lower() == 'depth':
        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
    else:
        q = fid.variables[quantity][:]


    #A 2D quantity is time dependent; collapse the time axis per point
    #using the reduction function (default: max over all timesteps)
    if len(q.shape) == 2:
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    #Now q has dimension: number_of_points

    #Create grid and update xll/yll corner and x,y
    if verbose: print 'Creating grid'
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1

    #Shift the georeference so the grid origin is the data's lower-left
    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2


    from Numeric import zeros, Float
    grid_points = zeros ( (ncols*nrows, 2), Float )


    #Regular grid points, row-major from the lower-left corner
    for i in xrange(nrows):
        yg = i*cellsize
        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

            grid_points[k,0] = xg
            grid_points[k,1] = yg

    #Interpolate
    from least_squares import Interpolation
   

    #FIXME: This should be done with precrop = True, otherwise it'll
    #take forever. With expand_search  set to False, some grid points might
    #miss out....

    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
                           precrop = False, expand_search = True,
                           verbose = verbose)

    #Interpolate using quantity values
    if verbose: print 'Interpolating'
    grid_values = interp.interpolate(q).flat

    #Write
    #Write prj file
    if verbose: print 'Writing %s' %prjfile
    prjid = open(prjfile, 'w')
    prjid.write('Projection    %s\n' %'UTM')
    prjid.write('Zone          %d\n' %zone)
    prjid.write('Datum         %s\n' %datum)
    prjid.write('Zunits        NO\n')
    prjid.write('Units         METERS\n')
    prjid.write('Spheroid      %s\n' %datum)
    prjid.write('Xshift        %d\n' %false_easting)
    prjid.write('Yshift        %d\n' %false_northing)
    prjid.write('Parameters\n')
    prjid.close()


   
    if verbose: print 'Writing %s' %ascfile
    NODATA_value = -9999
 
    ascid = open(ascfile, 'w')

    ascid.write('ncols         %d\n' %ncols)
    ascid.write('nrows         %d\n' %nrows)
    ascid.write('xllcorner     %d\n' %newxllcorner)
    ascid.write('yllcorner     %d\n' %newyllcorner)
    ascid.write('cellsize      %f\n' %cellsize)
    ascid.write('NODATA_value  %d\n' %NODATA_value)


    #Get bounding polygon from mesh; grid points outside the mesh get
    #NODATA_value in the output
    P = interp.mesh.get_boundary_polygon()
    inside_indices = inside_polygon(grid_points, P)

    #ASC files are written top row first, hence the (nrows-i-1) index
    for i in range(nrows):
        if verbose and i%((nrows+10)/10)==0:
            print 'Doing row %d of %d' %(i, nrows)

        for j in range(ncols):
            index = (nrows-i-1)*ncols+j

            #NOTE(review): sometrue scans all inside_indices per cell,
            #making this loop O(nrows*ncols*len(inside_indices))
            if sometrue(inside_indices == index):
                ascid.write('%f ' %grid_values[index])
            else:
                ascid.write('%d ' %NODATA_value)

        ascid.write('\n')

    #Close
    ascid.close()
    fid.close()
3267
def asc_csiro2sww(bath_dir, elevation_dir, sww_file, verbose=False):
    """Convert CSIRO esri ascii grids to an sww file.
   
    assume:
    All files are in ersi asc format

    4 types of information
    bathymetry
    elevation
    u velocity
    v velocity

    Assume each type is in a seperate directory
    assume one bath file with extention .000
    """
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, Int, Int32, searchsorted, zeros, array
    from Numeric import allclose, around
       
    from coordinate_transforms.redfearn import redfearn
   
    precision = Float # So if we want to change the precision its done here
   
    # go in to the bath dir and load the 000 file,
    bath_files = os.listdir(bath_dir)
    #print "bath_files",bath_files

    #fixme: make more general?
    #NOTE(review): assumes os.listdir returns the .000 file first - the
    #order of os.listdir is not guaranteed; verify against real data dirs.
    bath_file = bath_files[0]
    bath_dir_file =  bath_dir + os.sep + bath_file
    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
    #print "bath_metadata",bath_metadata
    #print "bath_grid",bath_grid

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    base_start = bath_file[-12:]
    #print "base_start",base_start
   
    #go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir  + os.sep + 'el' + base_start
    elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
    #print "elevation_dir_file",elevation_dir_file
    #print "elevation_grid", elevation_grid
   
    #Both grids must share the same georeferencing header
    assert bath_metadata == elevation_metadata


   
   
    number_of_latitudes = bath_grid.shape[0] 
    number_of_longitudes = bath_grid.shape[1]
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes*number_of_longitudes
    #Two triangles per grid cell
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] for x in range(number_of_longitudes)]

   
    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] for y in range(number_of_latitudes)]

    # reverse order of lat, so the fist lat represents the first grid row
    latitudes.reverse()
   
    #print "number_of_latitudes", number_of_latitudes
    #print "number_of_longitudes", number_of_longitudes
    #print "number_of_times", number_of_times
    #print "latitudes", latitudes
    #print "longitudes", longitudes
   
    ######### WRITE THE SWW FILE #############
    # NetCDF file definition
    outfile = NetCDFFile(sww_file, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'


    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1
   
    #FIXME(DSG) - hack to get a good sww file
    number_of_times = 1
   
    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)

    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    #Store
    #NOTE(review): redfearn is already imported at the top of this function
    from coordinate_transforms.redfearn import redfearn
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print 'Making triangular grid'
    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
   
    #Map (longitude index, latitude index) -> flat point index
    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):       
        for l, lon in enumerate(longitudes): 

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat,lon)

            #NOTE(review): this builds a tuple, not a string - harmless
            #only because the assert below is commented out
            msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1


    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            volumes.append([v1,v2,v3]) #Upper element
            volumes.append([v4,v3,v2]) #Lower element

    volumes = array(volumes)

    # FIXME - use coords
    zone = refzone
    xllcorner = min(x)
    yllcorner = min(y)

    outfile.xllcorner = xllcorner
    outfile.yllcorner = yllcorner
    outfile.zone = zone


    z = resize(bath_grid,outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - xllcorner
    outfile.variables['y'][:] = y - yllcorner
    outfile.variables['z'][:] = z
    #outfile.variables['elevation'][:] = z  #FIXME HACK
    #outfile.variables['time'][:] = times   #Store time relative
    #outfile.variables['time'][:] = [0.0]   #Store time relative
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

    # do this to create an ok sww file?
    outfile.variables['time'][:] = [0]   #Store time relative
    #NOTE(review): this rebinds the dict entry instead of writing into the
    #NetCDF 'stage' variable (cf. the [:] assignments above) - looks like
    #it should be outfile.variables['stage'][:] = ...; verify intent.
    outfile.variables['stage'] = z
    #outfile.variables['xmomentum'] = z
    #outfile.variables['ymomentum'] = z
   
    outfile.close()
3454   
def _read_asc(filename, verbose=False):
    """Read esri file from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Returns a tuple (metadata, grid) where metadata is a dict with the
    float entries 'xllcorner', 'yllcorner', 'cellsize' and 'NODATA_value'
    and grid is an nrows x ncols array of floats (first file row first).
    """

    datafile = open(filename)

    if verbose: print('Reading DEM from %s' % filename)
    lines = datafile.readlines()
    datafile.close()

    if verbose: print('Got %d lines' % len(lines))

    # Header: six '<keyword> <value>' lines in fixed order
    ncols = int(lines.pop(0).split()[1])
    nrows = int(lines.pop(0).split()[1])
    xllcorner = float(lines.pop(0).split()[1])
    yllcorner = float(lines.pop(0).split()[1])
    cellsize = float(lines.pop(0).split()[1])
    NODATA_value = float(lines.pop(0).split()[1])

    # Remaining lines are the data rows
    assert len(lines) == nrows

    # Store data, one array per grid row
    grid = []
    for line in lines:
        cells = line.split()
        assert len(cells) == ncols
        grid.append(array([float(x) for x in cells]))
    grid = array(grid)

    return {'xllcorner':xllcorner,
            'yllcorner':yllcorner,
            'cellsize':cellsize,
            'NODATA_value':NODATA_value}, grid
3501   
Note: See TracBrowser for help on using the repository browser.