source: inundation/pyvolution/data_manager.py @ 2045

Last change on this file since 2045 was 2045, checked in by ole, 18 years ago

Changed expand_search back to True.

File size: 108.7 KB
Line 
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST boundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simulation using pyvolution
48
49"""
50import os
51
52from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
53     searchsorted, zeros, allclose, around
54
55   
56from coordinate_transforms.geo_reference import Geo_reference
57
def make_filename(s):
    """Return a copy of s suitable for use as a filename.

    Leading/trailing whitespace is stripped, interior spaces become
    underscores, parentheses are dropped and double underscores are
    collapsed to single ones.
    """

    result = s.strip().replace(' ', '_')
    for ch in ('(', ')'):
        result = result.replace(ch, '')

    return result.replace('__', '_')
69
70
71def check_dir(path, verbose=None):
72    """Check that specified path exists.
73    If path does not exist it will be created if possible
74
75    USAGE:
76       checkdir(path, verbose):
77
78    ARGUMENTS:
79        path -- Directory
80        verbose -- Flag verbose output (default: None)
81
82    RETURN VALUE:
83        Verified path including trailing separator
84
85    """
86
87    import os, sys
88    import os.path
89
90    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
91        unix = 0
92    else:
93        unix = 1
94
95
96    if path[-1] != os.sep:
97        path = path + os.sep  # Add separator for directories
98
99    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
100    if not (os.access(path,os.R_OK and os.W_OK) or path == ''):
101        try:
102            exitcode=os.mkdir(path)
103
104            # Change access rights if possible
105            #
106            if unix:
107                exitcode=os.system('chmod 775 '+path)
108            else:
109                pass  # FIXME: What about acces rights under Windows?
110
111            if verbose: print 'MESSAGE: Directory', path, 'created.'
112
113        except:
114            print 'WARNING: Directory', path, 'could not be created.'
115            if unix:
116                path = '/tmp/'
117            else:
118                path = 'C:'
119
120            print 'Using directory %s instead' %path
121
122    return(path)
123
124
125
126def del_dir(path):
127    """Recursively delete directory path and all its contents
128    """
129
130    import os
131
132    if os.path.isdir(path):
133        for file in os.listdir(path):
134            X = os.path.join(path, file)
135
136
137            if os.path.isdir(X) and not os.path.islink(X):
138                del_dir(X)
139            else:
140                try:
141                    os.remove(X)
142                except:
143                    print "Could not remove file %s" %X
144
145        os.rmdir(path)
146
147
148
def create_filename(datadir, filename, format, size=None, time=None):
    """Build a full file name of the form
    <datadir>/<filename>[_size<size>][_time<time>].<format>
    """

    import os
    #from config import data_dir

    parts = [check_dir(datadir) + filename]

    if size is not None:
        parts.append('_size%d' %size)

    if time is not None:
        parts.append('_time%.2f' %time)

    parts.append('.' + format)
    return ''.join(parts)
164
165
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format

    ARGUMENTS:
        datadir  -- Directory to search (verified/created by check_dir)
        filename -- Base name of the files
        format   -- File extension, e.g. 'sww'
        size     -- Size tag embedded in the filename by create_filename

    RETURN VALUE:
        List of matching file names (possibly empty)
    """

    import glob

    import os
    #from config import data_dir

    directory = check_dir(datadir)  #Includes trailing separator

    #Match the '_size%d' tag written by create_filename.
    #NOTE: previous pattern searched for '_size=%d' (which create_filename
    #never writes) and inserted a second path separator after the directory.
    pattern = directory + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)
179
180
181
182#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats

    Validates the file mode, derives the output filename from the
    domain's data directory and name, and records basic bookkeeping
    (timestep counter, number of volumes, the domain itself).
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], ('''Mode %s must be either:''' %mode +
                                         '''   'w' (write)''' +
                                         '''   'r' (read)''' +
                                         '''   'a' (append)''')

        #Derive filename from the domain's data directory and name
        self.filename = create_filename(domain.get_datadir(),
                        domain.get_name(), extension)

        self.timestep = 0                     #Timesteps written so far
        self.number_of_volumes = len(domain)  #Triangles in the mesh
        self.domain = domain


        #FIXME: Should we have a general set_precision function?
209
210
211
212#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        # domain    -- simulation domain supplying mesh size, quantities etc.
        # mode      -- 'w' (write), 'r' (read) or 'a' (append)
        # max_size  -- soft limit on file size in bytes before the output is
        #              split into a new file (see store_timestep)
        # recursion -- internal: depth of file-splitting recursion; False/0
        #              for the top-level writer
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file: global attributes first
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #With smoothing, vertices are shared between triangles;
            #without it every triangle carries its own three points
            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility ('z' duplicates 'elevation')
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility (duplicate elevation into 'z')
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file

        names -- quantity name or list/tuple of quantity names
                 ('stage', 'xmomentum', 'ymomentum')

        If appending would push the file past max_size, a new writer
        is created with a time-stamped filename and the data is written
        there instead (see the recursion branch below).
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        minimum_allowed_depth = 0.001
        #minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF; retry in case a concurrent reader holds the file
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase =  file_size/i  #Average bytes per stored timestep
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domian.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break swollens reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    #Clip stage to the bed where depth is below the threshold
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()
482
483
484
485#Class for handling checkpoints data
486class Data_format_cpt(Data_format):
487    """Interface to native NetCDF format (.cpt)
488    """
489
490
491    def __init__(self, domain, mode = 'w'):
492        from Scientific.IO.NetCDF import NetCDFFile
493        from Numeric import Int, Float, Float
494
495        self.precision = Float #Use full precision
496
497        Data_format.__init__(self, domain, 'sww', mode)
498
499
500        # NetCDF file definition
501        fid = NetCDFFile(self.filename, mode)
502
503        if mode == 'w':
504            #Create new file
505            fid.institution = 'Geoscience Australia'
506            fid.description = 'Checkpoint data'
507            #fid.smooth = domain.smooth
508            fid.order = domain.default_order
509
510            # dimension definitions
511            fid.createDimension('number_of_volumes', self.number_of_volumes)
512            fid.createDimension('number_of_vertices', 3)
513
514            #Store info at all vertices (no smoothing)
515            fid.createDimension('number_of_points', 3*self.number_of_volumes)
516            fid.createDimension('number_of_timesteps', None) #extensible
517
518            # variable definitions
519
520            #Mesh
521            fid.createVariable('x', self.precision, ('number_of_points',))
522            fid.createVariable('y', self.precision, ('number_of_points',))
523
524
525            fid.createVariable('volumes', Int, ('number_of_volumes',
526                                                'number_of_vertices'))
527
528            fid.createVariable('time', self.precision,
529                               ('number_of_timesteps',))
530
531            #Allocate space for all quantities
532            for name in domain.quantities.keys():
533                fid.createVariable(name, self.precision,
534                                   ('number_of_timesteps',
535                                    'number_of_points'))
536
537        #Close
538        fid.close()
539
540
541    def store_checkpoint(self):
542        """
543        Write x,y coordinates of triangles.
544        Write connectivity (
545        constituting
546        the bed elevation.
547        """
548
549        from Scientific.IO.NetCDF import NetCDFFile
550
551        from Numeric import concatenate
552
553        domain = self.domain
554
555        #Get NetCDF
556        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
557
558        # Get the variables
559        x = fid.variables['x']
560        y = fid.variables['y']
561
562        volumes = fid.variables['volumes']
563
564        # Get X, Y and bed elevation Z
565        Q = domain.quantities['elevation']
566        X,Y,Z,V = Q.get_vertex_values(xy=True,
567                                      precision = self.precision)
568
569
570
571        x[:] = X.astype(self.precision)
572        y[:] = Y.astype(self.precision)
573        z[:] = Z.astype(self.precision)
574
575        volumes[:] = V
576
577        #Close
578        fid.close()
579
580
581    def store_timestep(self, name):
582        """Store time and named quantity to file
583        """
584        from Scientific.IO.NetCDF import NetCDFFile
585        from time import sleep
586
587        #Get NetCDF
588        retries = 0
589        file_open = False
590        while not file_open and retries < 10:
591            try:
592                fid = NetCDFFile(self.filename, 'a') #Open existing file
593            except IOError:
594                #This could happen if someone was reading the file.
595                #In that case, wait a while and try again
596                msg = 'Warning (store_timestep): File %s could not be opened'\
597                          %self.filename
598                msg += ' - trying again'
599                print msg
600                retries += 1
601                sleep(1)
602            else:
603                file_open = True
604
605            if not file_open:
606                msg = 'File %s could not be opened for append' %self.filename
607            raise msg
608
609
610        domain = self.domain
611
612        # Get the variables
613        time = fid.variables['time']
614        stage = fid.variables['stage']
615        i = len(time)
616
617        #Store stage
618        time[i] = self.domain.time
619
620        # Get quantity
621        Q = domain.quantities[name]
622        A,V = Q.get_vertex_values(xy=False,
623                                  precision = self.precision)
624
625        stage[i,:] = A.astype(self.precision)
626
627        #Flush and close
628        fid.sync()
629        fid.close()
630
631
632
633
634
635#Function for storing xya output
636#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface for storing model state as ASCII .xya point data.

    #FIXME Not done yet for this version
    """


    def __init__(self, domain, mode = 'w'):
        # domain -- simulation domain; mode -- 'w', 'r' or 'a'
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        #With smoothing, vertices are shared; otherwise one point per
        #triangle vertex
        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        #Header line: point count, attribute count and column legend
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        # NOTE(review): assumes field_values columns 0 and 1 are elevation
        # and friction respectively -- confirm against domain definition
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0 of the conserved quantities)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes] -- one line per point
        #(map(None, ...) zips the columns, py2 idiom)
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file

        Not implemented for the xya format (no-op).
        """
        pass
706
707
708#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number elements.
       triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """

    import os.path

    #Ensure the output file carries a .obj extension
    base, extension = os.path.splitext(filename)
    if extension == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    num_elements, verts_per_element = x.shape
    assert verts_per_element == 3  #Assuming three vertices per element

    #Vertex lines: one per (element, vertex) pair
    for i in range(num_elements):
        for j in range(verts_per_element):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    #Face lines: obj vertex indices are 1-based
    for i in range(num_elements):
        first = i*verts_per_element
        outfile.write("f %d %d %d\n" % (first+1,first+2,first+3))

    outfile.close()
746
747
748#########################################################
749#Conversion routines
750########################################################
751
def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj

    basefilename -- base of the .sww file name (see create_filename)
    size         -- number of volumes/lines in the file; also part of
                    the file name tag

    Writes one obj file for the bathymetry and one per timestep for
    the stage.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    #Reshape the flat point arrays into (M,3): vertex j of element i
    #is stored at flat index i+j*M
    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        #Use the stage as the z coordinate for this timestep
        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)
804
805
806def dat2obj(basefilename):
807    """Convert line based data output to obj
808    FIXME: Obsolete?
809    """
810
811    import glob, os
812    from config import data_dir
813
814
815    #Get bathymetry and x,y's
816    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()
817
818    from Numeric import zeros, Float
819
820    M = len(lines)  #Number of lines
821    x = zeros((M,3), Float)
822    y = zeros((M,3), Float)
823    z = zeros((M,3), Float)
824
825    ##i = 0
826    for i, line in enumerate(lines):
827        tokens = line.split()
828        values = map(float,tokens)
829
830        for j in range(3):
831            x[i,j] = values[j*3]
832            y[i,j] = values[j*3+1]
833            z[i,j] = values[j*3+2]
834
835        ##i += 1
836
837
838    #Write obj for bathymetry
839    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)
840
841
842    #Now read all the data files with variable information, combine with
843    #x,y info
844    #and store as obj
845
846    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')
847
848    for filename in files:
849        print 'Processing %s' % filename
850
851        lines = open(data_dir+os.sep+filename,'r').readlines()
852        assert len(lines) == M
853        root, ext = os.path.splitext(filename)
854
855        #Get time from filename
856        i0 = filename.find('_time=')
857        if i0 == -1:
858            #Skip bathymetry file
859            continue
860
861        i0 += 6  #Position where time starts
862        i1 = filename.find('.dat')
863
864        if i1 > i0:
865            t = float(filename[i0:i1])
866        else:
867            raise 'Hmmmm'
868
869
870
871        ##i = 0
872        for i, line in enumerate(lines):
873            tokens = line.split()
874            values = map(float,tokens)
875
876            for j in range(3):
877                z[i,j] = values[j]
878
879            ##i += 1
880
881        #Write obj for variable data
882        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
883
884
def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    nettcdf file filename2

    All dimensions and variables are copied; only the 'time' and 'stage'
    records are subsampled by the given slice.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions (must be defined before any variables that use them)
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    #Declare all variables with the same type and dimensions
    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
932
933
934#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename

    domain.format selects the writer class, e.g. 'sww' ->
    Data_format_sww. Raises KeyError for an unknown format.
    """

    #Look the class up by name instead of eval() - same effect for all
    #valid formats, without executing arbitrary strings
    cls = globals()['Data_format_%s' %domain.format]
    return cls(domain, mode)
941
942def xya2pts(basename_in, basename_out=None, verbose=False,
943            #easting_min=None, easting_max=None,
944            #northing_min=None, northing_max=None,
945            stride = 1,           
946            attribute_name = 'elevation',
947            z_func = None):
948    """Read points data from ascii (.xya)
949
950    Example:
951
952              x(m)        y(m)        z(m)
953         0.00000e+00  0.00000e+00  1.3535000e-01
954         0.00000e+00  1.40000e-02  1.3535000e-01
955
956   
957
958    Convert to NetCDF pts format which is
959
960    points:  (Nx2) Float array
961    elevation: N Float array
962
963    Only lines that contain three numeric values are processed
964
965    If z_func is specified, it will be applied to the third field
966    """
967
968    import os 
969    #from Scientific.IO.NetCDF import NetCDFFile
970    from Numeric import Float, arrayrange, concatenate
971
972    root, ext = os.path.splitext(basename_in)
973
974    if ext == '': ext = '.xya'
975
976    #Get NetCDF
977    infile = open(root + ext, 'r')  #Open existing xya file for read
978
979    if verbose: print 'Reading xya points from %s' %(root + ext)
980
981    points = []
982    attribute = []
983    for i, line in enumerate(infile.readlines()):
984
985        if i % stride != 0: continue
986       
987        fields = line.split()
988
989        try:
990            assert len(fields) == 3
991        except:   
992            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line) 
993
994        try:
995            x = float( fields[0] )
996            y = float( fields[1] )
997            z = float( fields[2] )           
998        except:
999            continue
1000
1001        points.append( [x, y] )
1002
1003        if callable(z_func):
1004            attribute.append(z_func(z))           
1005        else:   
1006            attribute.append(z)
1007
1008
1009    #Get output file
1010    if basename_out == None:
1011        ptsname = root + '.pts'
1012    else:
1013        ptsname = basename_out + '.pts'
1014
1015    if verbose: print 'Store to NetCDF file %s' %ptsname
1016    write_ptsfile(ptsname, points, attribute, attribute_name)
1017
1018
def write_ptsfile(ptsname, points, attribute, attribute_name = None):
    """Write points and associated attribute to pts (NetCDF) format

    ptsname: full name of output file (extension included)
    points: sequence of (x, y) coordinate pairs
    attribute: one value per point
    attribute_name: NetCDF variable name for the attribute;
                    defaults to 'attribute'
    """

    from Numeric import Float
    from Scientific.IO.NetCDF import NetCDFFile
    from coordinate_transforms.geo_reference import Geo_reference

    if attribute_name is None:
        attribute_name = 'attribute'

    #Create new NetCDF file and global metadata
    outfile = NetCDFFile(ptsname, 'w')
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'

    #Default georeferencing
    Geo_reference().write_NetCDF(outfile)

    #Dimensions: N points, two spatial dimensions
    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    #Variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    #Store data and close
    outfile.variables['points'][:, :] = points
    outfile.variables[attribute_name][:] = attribute

    outfile.close()
1063   
1064
def dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    basename_in:  stem of input file; '.dem' is appended
    basename_out: stem of output file; defaults to basename_in
                  ('.pts' is appended)
    verbose:      if True, print progress information
    easting_min, easting_max, northing_min, northing_max:
        optional absolute bounding box; only DEM cells inside it are
        exported. Each bound defaults to the full DEM extent.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    #Header attributes are stored as one-element arrays; [0] unpacks the scalar
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values: any unspecified bound covers the full DEM extent
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    #NOTE(review): these offsets are computed but not used below -- verify
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing: origin of output is the (possibly cropped) lower left corner
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows


    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
    outfile.createDimension('number_of_points', nrows_in_bounding_box*ncols_in_bounding_box)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    #FIXME: Could perhaps be faster using array operations (Fixed 27/7/05)
    global_index = 0
    for i in range(nrows):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index
        #Per-row buffers for points that fall inside the bounding box
        tpoints = zeros((ncols_in_bounding_box, 2), Float)
        telev =  zeros(ncols_in_bounding_box, Float)
        local_index = 0

        #Row 0 is the northernmost row of the DEM.
        #NOTE(review): this makes y range over (yllcorner+cellsize ..
        #yllcorner+nrows*cellsize], i.e. shifted one cell up relative to a
        #lower-left-corner registration -- confirm intended grid registration
        y = (nrows-i)*cellsize + yllcorner
        for j in range(ncols):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                #Store coordinates relative to the new lower left corner
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        #Only write rows that fell completely inside the bounding box.
        #NOTE(review): a partially covered row is counted in global_index but
        #never written, which would also trip the assert below -- bounds are
        #presumably expected to align with whole rows; verify with callers
        if upper_index == lower_index + ncols_in_bounding_box:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nrows_in_bounding_box*ncols_in_bounding_box, 'index not equal to number of points'

    infile.close()
    outfile.close()
1208
1209
1210
1211def _read_hecras_cross_sections(lines):
1212    """Return block of surface lines for each cross section
1213    Starts with SURFACE LINE,
1214    Ends with END CROSS-SECTION 
1215    """
1216
1217    points = []
1218
1219    reading_surface = False
1220    for i, line in enumerate(lines): 
1221       
1222        if len(line.strip()) == 0:    #Ignore blanks
1223            continue                       
1224
1225        if lines[i].strip().startswith('SURFACE LINE'):
1226            reading_surface = True
1227            continue
1228
1229        if lines[i].strip().startswith('END') and reading_surface:
1230            yield points
1231            reading_surface = False
1232            points = []
1233
1234        if reading_surface:
1235            fields = line.strip().split(',')
1236            easting = float(fields[0])
1237            northing = float(fields[1])
1238            elevation = float(fields[2])                       
1239            points.append([easting, northing, elevation])               
1240
1241
1242
1243
def hecras_cross_sections2pts(basename_in,
                              basename_out=None,
                              verbose=False):
    """Read HEC-RAS Elevation data from the following ASCII format (.sdf)

    basename_in:  stem of input file; '.sdf' is appended
    basename_out: stem of output file; defaults to basename_in
                  ('.pts' is appended)
    verbose:      if True, print progress information

    Example:


# RAS export file created on Mon 15Aug2005 11:42
# by HEC-RAS Version 3.1.1

BEGIN HEADER:
  UNITS: METRIC
  DTM TYPE: TIN
  DTM: v:\1\cit\perth_topo\river_tin
  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
  MAP PROJECTION: UTM
  PROJECTION ZONE: 50
  DATUM: AGD66
  VERTICAL DATUM:
  NUMBER OF REACHES:  19
  NUMBER OF CROSS-SECTIONS:  14206
END HEADER:


Only the SURFACE LINE data of the following form will be utilised

  CROSS-SECTION:
    STREAM ID:Southern-Wungong
    REACH ID:Southern-Wungong
    STATION:19040.*
    CUT LINE:
      405548.671603161 , 6438142.7594925
      405734.536092045 , 6438326.10404912
      405745.130459356 , 6438331.48627354
      405813.89633823 , 6438368.6272789
    SURFACE LINE:
     405548.67,   6438142.76,   35.37
     405552.24,   6438146.28,   35.41
     405554.78,   6438148.78,   35.44
     405555.80,   6438149.79,   35.44
     405559.37,   6438153.31,   35.45
     405560.88,   6438154.81,   35.44
     405562.93,   6438156.83,   35.42
     405566.50,   6438160.35,   35.38
     405566.99,   6438160.83,   35.37
     ...
   END CROSS-SECTION 

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get ASCII file
    infile = open(root + '.sdf', 'r')  #Open SDF file for read

    if verbose: print 'Reading DEM from %s' %(root + '.sdf')

    lines = infile.readlines()
    infile.close()

    if verbose: print 'Converting to pts format'

    #Skip any leading blank lines and '#'-comment lines
    i = 0
    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
        i += 1

    #Parse the fixed-order header block. Each assert documents the
    #expected line at this position and guards against format drift.
    assert lines[i].strip().upper() == 'BEGIN HEADER:'
    i += 1

    assert lines[i].strip().upper().startswith('UNITS:')
    units = lines[i].strip().split()[1]
    i += 1

    assert lines[i].strip().upper().startswith('DTM TYPE:')
    i += 1

    assert lines[i].strip().upper().startswith('DTM:')
    i += 1

    assert lines[i].strip().upper().startswith('STREAM')
    i += 1

    assert lines[i].strip().upper().startswith('CROSS')
    i += 1

    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
    projection = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
    zone = int(lines[i].strip().split(':')[1])
    i += 1

    assert lines[i].strip().upper().startswith('DATUM:')
    datum = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
    number_of_cross_sections = int(lines[i].strip().split(':')[1])
    i += 1


    #Now read all points: flatten every cross section into one point list.
    #NOTE(review): if the file contains no cross sections, j is never bound
    #and the assert below raises NameError rather than a clear message.
    points = []
    elevation = []
    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
        for k, entry in enumerate(entries):
            points.append(entry[:2])
            elevation.append(entry[2])


    #Sanity check: section count found must match the header's claim
    msg = 'Actual #number_of_cross_sections == %d, Reported as %d'\
          %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg

    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    #FIXME (DSG-ON): use loadASCII export_points_file
    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable ' +\
                          'storage of spatial point data derived from HEC-RAS'

    #Georeferencing: SDF coordinates are absolute, so the origin is (0, 0)
    outfile.zone = zone
    outfile.xllcorner = 0.0
    outfile.yllcorner = 0.0
    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
    outfile.false_northing = 1000000

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    outfile.variables['points'][:] = points
    outfile.variables['elevation'][:] = elevation

    outfile.close()
1418
1419
1420
1421def sww2dem(basename_in, basename_out = None,
1422            quantity = None,
1423            timestep = None,
1424            reduction = None,
1425            cellsize = 10,
1426            easting_min = None,
1427            easting_max = None,
1428            northing_min = None,
1429            northing_max = None,
1430            expand_search = False, #To avoid intractable situations (This will be fixed when least_squares gets redesigned)           
1431            verbose = False,
1432            origin = None,
1433            datum = 'WGS84',
1434            format = 'ers'):
1435   
1436    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1437
1438    Example (ASC):
1439
1440    ncols         3121
1441    nrows         1800
1442    xllcorner     722000
1443    yllcorner     5893000
1444    cellsize      25
1445    NODATA_value  -9999
1446    138.3698 137.4194 136.5062 135.5558 ..........
1447
1448    Also write accompanying file with same basename_in but extension .prj
1449    used to fix the UTM zone, datum, false northings and eastings.
1450
1451    The prj format is assumed to be as
1452
1453    Projection    UTM
1454    Zone          56
1455    Datum         WGS84
1456    Zunits        NO
1457    Units         METERS
1458    Spheroid      WGS84
1459    Xshift        0.0000000000
1460    Yshift        10000000.0000000000
1461    Parameters
1462
1463
1464    The parameter quantity must be the name of an existing quantity or
1465    an expression involving existing quantities. The default is
1466    'elevation'.
1467
1468    if timestep (an index) is given, output quantity at that timestep
1469
1470    if reduction is given use that to reduce quantity over all timesteps.
1471
1472    datum
1473   
1474    format can be either 'asc' or 'ers'
1475    """
1476
1477    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1478    from utilities.polygon import inside_polygon
1479    from util import apply_expression_to_dictionary
1480   
1481    msg = 'Format must be either asc or ers'
1482    assert format.lower() in ['asc', 'ers'], msg
1483   
1484    false_easting = 500000
1485    false_northing = 10000000
1486
1487    if quantity is None:
1488        quantity = 'elevation'
1489
1490    if reduction is None:
1491        reduction = max
1492
1493    if basename_out is None:
1494        basename_out = basename_in + '_%s' %quantity
1495 
1496    swwfile = basename_in + '.sww'
1497    demfile = basename_out + '.' + format
1498    # Note the use of a .ers extension is optional (write_ermapper_grid will
1499    # deal with either option
1500
1501    #Read sww file
1502    if verbose: print 'Reading from %s' %swwfile
1503    from Scientific.IO.NetCDF import NetCDFFile
1504    fid = NetCDFFile(swwfile)
1505
1506    #Get extent and reference
1507    x = fid.variables['x'][:]
1508    y = fid.variables['y'][:]
1509    volumes = fid.variables['volumes'][:]
1510
1511    number_of_timesteps = fid.dimensions['number_of_timesteps']
1512    number_of_points = fid.dimensions['number_of_points']
1513    if origin is None:
1514
1515        #Get geo_reference
1516        #sww files don't have to have a geo_ref
1517        try:
1518            geo_reference = Geo_reference(NetCDFObject=fid)
1519        except AttributeError, e:
1520            geo_reference = Geo_reference() #Default georef object
1521
1522        xllcorner = geo_reference.get_xllcorner()
1523        yllcorner = geo_reference.get_yllcorner()
1524        zone = geo_reference.get_zone()
1525    else:
1526        zone = origin[0]
1527        xllcorner = origin[1]
1528        yllcorner = origin[2]
1529
1530
1531
1532    #FIXME: Refactor using code from file_function.statistics
1533    #Something like print swwstats(swwname)
1534    if verbose:
1535        x = fid.variables['x'][:]
1536        y = fid.variables['y'][:]
1537        times = fid.variables['time'][:]
1538        print '------------------------------------------------'
1539        print 'Statistics of SWW file:'
1540        print '  Name: %s' %swwfile
1541        print '  Reference:'
1542        print '    Lower left corner: [%f, %f]'\
1543              %(xllcorner, yllcorner)
1544        print '    Start time: %f' %fid.starttime[0]
1545        print '  Extent:'
1546        print '    x [m] in [%f, %f], len(x) == %d'\
1547              %(min(x.flat), max(x.flat), len(x.flat))
1548        print '    y [m] in [%f, %f], len(y) == %d'\
1549              %(min(y.flat), max(y.flat), len(y.flat))
1550        print '    t [s] in [%f, %f], len(t) == %d'\
1551              %(min(times), max(times), len(times))
1552        print '  Quantities [SI units]:'
1553        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
1554            q = fid.variables[name][:].flat
1555            print '    %s in [%f, %f]' %(name, min(q), max(q))
1556
1557
1558
1559
1560
1561    #Get quantity and reduce if applicable
1562    if verbose: print 'Processing quantity %s' %quantity
1563
1564    #Turn NetCDF objects into Numeric arrays
1565    quantity_dict = {}
1566    for name in fid.variables.keys():
1567        quantity_dict[name] = fid.variables[name][:]
1568
1569
1570    q = apply_expression_to_dictionary(quantity, quantity_dict)
1571
1572
1573
1574    if len(q.shape) == 2:
1575        #q has a time component and needs to be reduced along
1576        #the temporal dimension
1577        if verbose: print 'Reducing quantity %s' %quantity
1578        q_reduced = zeros( number_of_points, Float )
1579
1580        for k in range(number_of_points):
1581            q_reduced[k] = reduction( q[:,k] )
1582
1583        q = q_reduced
1584
1585    #Post condition: Now q has dimension: number_of_points
1586    assert len(q.shape) == 1
1587    assert q.shape[0] == number_of_points   
1588   
1589
1590    if verbose:
1591        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1592   
1593
1594    #Create grid and update xll/yll corner and x,y
1595
1596    #Relative extent
1597    if easting_min is None:
1598        xmin = min(x)
1599    else:
1600        xmin = easting_min - xllcorner
1601
1602    if easting_max is None:
1603        xmax = max(x)
1604    else:
1605        xmax = easting_max - xllcorner
1606       
1607    if northing_min is None:       
1608        ymin = min(y)
1609    else:
1610        ymin = northing_min - yllcorner
1611       
1612    if northing_max is None:       
1613        ymax = max(y)
1614    else:
1615        ymax = northing_max - yllcorner
1616       
1617       
1618   
1619    if verbose: print 'Creating grid'
1620    ncols = int((xmax-xmin)/cellsize)+1
1621    nrows = int((ymax-ymin)/cellsize)+1
1622
1623
1624    #New absolute reference and coordinates
1625    newxllcorner = xmin+xllcorner
1626    newyllcorner = ymin+yllcorner
1627
1628    x = x+xllcorner-newxllcorner
1629    y = y+yllcorner-newyllcorner
1630
1631    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1632    assert len(vertex_points.shape) == 2
1633
1634
1635    from Numeric import zeros, Float
1636    grid_points = zeros ( (ncols*nrows, 2), Float )
1637
1638
1639    for i in xrange(nrows):
1640        if format.lower() == 'asc':
1641            yg = i*cellsize
1642        else:   
1643            #this will flip the order of the y values for ers
1644            yg = (nrows-i)*cellsize
1645           
1646        for j in xrange(ncols):
1647            xg = j*cellsize
1648            k = i*ncols + j
1649
1650            grid_points[k,0] = xg
1651            grid_points[k,1] = yg
1652
1653    #Interpolate
1654    from least_squares import Interpolation
1655   
1656
1657    #FIXME: This should be done with precrop = True (?), otherwise it'll
1658    #take forever. With expand_search  set to False, some grid points might
1659    #miss out.... This will be addressed though Duncan's refactoring of least_squares
1660
1661    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
1662                           precrop = False,
1663                           expand_search = expand_search,
1664                           verbose = verbose)
1665
1666   
1667
1668    #Interpolate using quantity values
1669    if verbose: print 'Interpolating'
1670    grid_values = interp.interpolate(q).flat
1671
1672    if verbose:
1673        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
1674                                                      max(grid_values))
1675
1676    if format.lower() == 'ers':
1677        # setup ERS header information
1678        grid_values = reshape(grid_values,(nrows, ncols))   
1679        NODATA_value = 0
1680        header = {}
1681        header['datum'] = '"' + datum + '"'
1682        # FIXME The use of hardwired UTM and zone number needs to be made optional
1683        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
1684        header['projection'] = '"UTM-' + str(zone) + '"'
1685        header['coordinatetype'] = 'EN'
1686        if header['coordinatetype'] == 'LL':
1687            header['longitude'] = str(newxllcorner)
1688            header['latitude'] = str(newyllcorner)
1689        elif header['coordinatetype'] == 'EN':
1690            header['eastings'] = str(newxllcorner)
1691            header['northings'] = str(newyllcorner)
1692        header['nullcellvalue'] = str(NODATA_value)
1693        header['xdimension'] = str(cellsize)
1694        header['ydimension'] = str(cellsize)
1695        header['value'] = '"' + quantity + '"'
1696
1697
1698        #Write
1699        if verbose: print 'Writing %s' %demfile
1700        import ermapper_grids
1701        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
1702
1703        fid.close()   
1704    else:
1705        #Write to Ascii format
1706       
1707        #Write prj file
1708        prjfile = basename_out + '.prj' 
1709       
1710        if verbose: print 'Writing %s' %prjfile
1711        prjid = open(prjfile, 'w')
1712        prjid.write('Projection    %s\n' %'UTM')
1713        prjid.write('Zone          %d\n' %zone)
1714        prjid.write('Datum         %s\n' %datum)
1715        prjid.write('Zunits        NO\n')
1716        prjid.write('Units         METERS\n')
1717        prjid.write('Spheroid      %s\n' %datum)
1718        prjid.write('Xshift        %d\n' %false_easting)
1719        prjid.write('Yshift        %d\n' %false_northing)
1720        prjid.write('Parameters\n')
1721        prjid.close()
1722
1723
1724   
1725        if verbose: print 'Writing %s' %ascfile
1726        NODATA_value = -9999
1727 
1728        ascid = open(demfile, 'w')
1729
1730        ascid.write('ncols         %d\n' %ncols)
1731        ascid.write('nrows         %d\n' %nrows)
1732        ascid.write('xllcorner     %d\n' %newxllcorner)
1733        ascid.write('yllcorner     %d\n' %newyllcorner)
1734        ascid.write('cellsize      %f\n' %cellsize)
1735        ascid.write('NODATA_value  %d\n' %NODATA_value)
1736
1737
1738        #Get bounding polygon from mesh
1739        P = interp.mesh.get_boundary_polygon()
1740        inside_indices = inside_polygon(grid_points, P)
1741
1742        for i in range(nrows):
1743            if verbose and i%((nrows+10)/10)==0:
1744                print 'Doing row %d of %d' %(i, nrows)
1745
1746            for j in range(ncols):
1747                index = (nrows-i-1)*ncols+j
1748
1749                if sometrue(inside_indices == index):
1750                    ascid.write('%f ' %grid_values[index])
1751                else:
1752                    ascid.write('%d ' %NODATA_value)
1753
1754            ascid.write('\n')
1755
1756        #Close
1757        ascid.close()
1758        fid.close()
1759
1760#Backwards compatibility       
1761def sww2asc(basename_in, basename_out = None,
1762            quantity = None,
1763            timestep = None,
1764            reduction = None,
1765            cellsize = 10,
1766            verbose = False,
1767            origin = None):
1768    print 'sww2asc will soon be obsoleted - please use sww2dem'
1769    sww2dem(basename_in,
1770            basename_out = basename_out,
1771            quantity = quantity,
1772            timestep = timestep,
1773            reduction = reduction,
1774            cellsize = cellsize,
1775            verbose = verbose,
1776            origin = origin,
1777            datum = 'WGS84',
1778            format = 'asc')
1779           
1780def sww2ers(basename_in, basename_out = None,
1781            quantity = None,
1782            timestep = None,
1783            reduction = None,
1784            cellsize = 10,
1785            verbose = False,
1786            origin = None,
1787            datum = 'WGS84'):
1788    print 'sww2ers will soon be obsoleted - please use sww2dem'
1789    sww2dem(basename_in, 
1790            basename_out = basename_out,
1791            quantity = quantity,
1792            timestep = timestep,
1793            reduction = reduction,
1794            cellsize = cellsize,
1795            verbose = verbose,
1796            origin = origin,
1797            datum = datum,
1798            format = 'ers')         
1799################################# END COMPATIBILITY ##############         
1800
1801
1802
1803
def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
                                  verbose=False):
    """Read Digitial Elevation model from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert basename_in + '.asc' to NetCDF format (.dem)
    mimicking the ASCII format closely.


    An accompanying file with same basename_in but extension .prj must exist
    and is used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters

    basename_in:  stem of the input .asc/.prj pair
    basename_out: stem of the output file; defaults to basename_in
                  ('.dem' is appended)
    verbose:      if True, print progress information
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, array

    #root, ext = os.path.splitext(basename_in)
    root = basename_in

    ###########################################
    # Read Meta data
    #The prj file must contain exactly these lines in this fixed order;
    #each assert guards against a differently laid out file
    if verbose: print 'Reading METADATA from %s' %root + '.prj'
    metadatafile = open(root + '.prj')
    metalines = metadatafile.readlines()
    metadatafile.close()

    L = metalines[0].strip().split()
    assert L[0].strip().lower() == 'projection'
    projection = L[1].strip()                   #TEXT

    L = metalines[1].strip().split()
    assert L[0].strip().lower() == 'zone'
    zone = int(L[1].strip())

    L = metalines[2].strip().split()
    assert L[0].strip().lower() == 'datum'
    datum = L[1].strip()                        #TEXT

    L = metalines[3].strip().split()
    assert L[0].strip().lower() == 'zunits'     #IGNORE
    zunits = L[1].strip()                       #TEXT

    L = metalines[4].strip().split()
    assert L[0].strip().lower() == 'units'
    units = L[1].strip()                        #TEXT

    L = metalines[5].strip().split()
    assert L[0].strip().lower() == 'spheroid'   #IGNORE
    spheroid = L[1].strip()                     #TEXT

    L = metalines[6].strip().split()
    assert L[0].strip().lower() == 'xshift'
    false_easting = float(L[1].strip())

    L = metalines[7].strip().split()
    assert L[0].strip().lower() == 'yshift'
    false_northing = float(L[1].strip())

    #print false_easting, false_northing, zone, datum


    ###########################################
    #Read DEM data

    datafile = open(basename_in + '.asc')

    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), ' lines'

    #Six header lines, each of the form '<keyword> <value>'
    ncols = int(lines[0].split()[1].strip())
    nrows = int(lines[1].split()[1].strip())
    xllcorner = float(lines[2].split()[1].strip())
    yllcorner = float(lines[3].split()[1].strip())
    cellsize = float(lines[4].split()[1].strip())
    NODATA_value = int(lines[5].split()[1].strip())

    #The remainder must be exactly one line per DEM row
    assert len(lines) == nrows + 6


    ##########################################


    if basename_out == None:
        netcdfname = root + '.dem'
    else:
        netcdfname = basename_out + '.dem'

    if verbose: print 'Store to NetCDF file %s' %netcdfname
    # NetCDF file definition
    fid = NetCDFFile(netcdfname, 'w')

    #Create new file: mirror the ASCII header as global attributes
    fid.institution = 'Geoscience Australia'
    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
                      'of spatial point data'

    fid.ncols = ncols
    fid.nrows = nrows
    fid.xllcorner = xllcorner
    fid.yllcorner = yllcorner
    fid.cellsize = cellsize
    fid.NODATA_value = NODATA_value

    fid.zone = zone
    fid.false_easting = false_easting
    fid.false_northing = false_northing
    fid.projection = projection
    fid.datum = datum
    fid.units = units


    # dimension definitions
    fid.createDimension('number_of_rows', nrows)
    fid.createDimension('number_of_columns', ncols)

    # variable definitions
    fid.createVariable('elevation', Float, ('number_of_rows',
                                            'number_of_columns'))

    # Get handles to the variables
    elevation = fid.variables['elevation']

    #Store data: one ASCII line becomes one row of the elevation grid
    n = len(lines[6:])
    for i, line in enumerate(lines[6:]):
        fields = line.split()
        if verbose and i%((n+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        elevation[i, :] = array([float(x) for x in fields])

    fid.close()
1961
1962
1963
1964
1965
1966def ferret2sww(basename_in, basename_out = None,
1967               verbose = False,
1968               minlat = None, maxlat = None,
1969               minlon = None, maxlon = None,
1970               mint = None, maxt = None, mean_stage = 0,
1971               origin = None, zscale = 1,
1972               fail_on_NaN = True,
1973               NaN_filler = 0,
1974               elevation = None,
1975               inverted_bathymetry = False
1976               ): #FIXME: Bathymetry should be obtained
1977                                  #from MOST somehow.
1978                                  #Alternatively from elsewhere
1979                                  #or, as a last resort,
1980                                  #specified here.
1981                                  #The value of -100 will work
1982                                  #for the Wollongong tsunami
1983                                  #scenario but is very hacky
1984    """Convert 'Ferret' NetCDF format for wave propagation to
1985    sww format native to pyvolution.
1986
1987    Specify only basename_in and read files of the form
1988    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
1989    relative height, x-velocity and y-velocity, respectively.
1990
1991    Also convert latitude and longitude to UTM. All coordinates are
1992    assumed to be given in the GDA94 datum.
1993
1994    min's and max's: If omitted - full extend is used.
1995    To include a value min may equal it, while max must exceed it.
1996    Lat and lon are assuemd to be in decimal degrees
1997
1998    origin is a 3-tuple with geo referenced
1999    UTM coordinates (zone, easting, northing)
2000
2001    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2002    which means that longitude is the fastest
2003    varying dimension (row major order, so to speak)
2004
2005    ferret2sww uses grid points as vertices in a triangular grid
2006    counting vertices from lower left corner upwards, then right
2007    """
2008
2009    import os
2010    from Scientific.IO.NetCDF import NetCDFFile
2011    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2012    from Numeric import allclose, around
2013       
2014    precision = Float
2015
2016
2017    #Get NetCDF data
2018    if verbose: print 'Reading files %s_*.nc' %basename_in
2019    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2020    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2021    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2022    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
2023
2024    if basename_out is None:
2025        swwname = basename_in + '.sww'
2026    else:
2027        swwname = basename_out + '.sww'
2028
2029    times = file_h.variables['TIME']
2030    latitudes = file_h.variables['LAT']
2031    longitudes = file_h.variables['LON']
2032
2033
2034
2035    #Precision used by most for lat/lon is 4 or 5 decimals
2036    e_lat = around(file_e.variables['LAT'][:], 5)
2037    e_lon = around(file_e.variables['LON'][:], 5)
2038
2039    #Check that files are compatible
2040    assert allclose(latitudes, file_u.variables['LAT'])
2041    assert allclose(latitudes, file_v.variables['LAT'])
2042    assert allclose(latitudes, e_lat)
2043
2044    assert allclose(longitudes, file_u.variables['LON'])
2045    assert allclose(longitudes, file_v.variables['LON'])
2046    assert allclose(longitudes, e_lon)   
2047
2048
2049
2050    if mint == None:
2051        jmin = 0
2052    else:
2053        jmin = searchsorted(times, mint)
2054
2055    if maxt == None:
2056        jmax=len(times)
2057    else:
2058        jmax = searchsorted(times, maxt)
2059
2060    if minlat == None:
2061        kmin=0
2062    else:
2063        kmin = searchsorted(latitudes, minlat)
2064
2065    if maxlat == None:
2066        kmax = len(latitudes)
2067    else:
2068        kmax = searchsorted(latitudes, maxlat)
2069
2070    if minlon == None:
2071        lmin=0
2072    else:
2073        lmin = searchsorted(longitudes, minlon)
2074
2075    if maxlon == None:
2076        lmax = len(longitudes)
2077    else:
2078        lmax = searchsorted(longitudes, maxlon)
2079
2080
2081
2082    times = times[jmin:jmax]
2083    latitudes = latitudes[kmin:kmax]
2084    longitudes = longitudes[lmin:lmax]
2085
2086
2087    if verbose: print 'cropping'
2088    zname = 'ELEVATION'
2089
2090   
2091    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2092    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2093    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2094    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2095
2096#    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2097#        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2098#    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2099#        from Numeric import asarray
2100#        elevations=elevations.tolist()
2101#        elevations.reverse()
2102#        elevations=asarray(elevations)
2103#    else:
2104#        from Numeric import asarray
2105#        elevations=elevations.tolist()
2106#        elevations.reverse()
2107#        elevations=asarray(elevations)
2108#        'print hmmm'
2109
2110
2111
2112    #Get missing values
2113    nan_ha = file_h.variables['HA'].missing_value[0]
2114    nan_ua = file_u.variables['UA'].missing_value[0]
2115    nan_va = file_v.variables['VA'].missing_value[0]
2116    if hasattr(file_e.variables[zname],'missing_value'):
2117        nan_e  = file_e.variables[zname].missing_value[0]
2118    else:
2119        nan_e = None
2120
2121    #Cleanup
2122    from Numeric import sometrue
2123
2124    missing = (amplitudes == nan_ha)
2125    if sometrue (missing):
2126        if fail_on_NaN:
2127            msg = 'NetCDFFile %s contains missing values'\
2128                  %(basename_in+'_ha.nc')
2129            raise msg
2130        else:
2131            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2132
2133    missing = (uspeed == nan_ua)
2134    if sometrue (missing):
2135        if fail_on_NaN:
2136            msg = 'NetCDFFile %s contains missing values'\
2137                  %(basename_in+'_ua.nc')
2138            raise msg
2139        else:
2140            uspeed = uspeed*(missing==0) + missing*NaN_filler
2141
2142    missing = (vspeed == nan_va)
2143    if sometrue (missing):
2144        if fail_on_NaN:
2145            msg = 'NetCDFFile %s contains missing values'\
2146                  %(basename_in+'_va.nc')
2147            raise msg
2148        else:
2149            vspeed = vspeed*(missing==0) + missing*NaN_filler
2150
2151
2152    missing = (elevations == nan_e)
2153    if sometrue (missing):
2154        if fail_on_NaN:
2155            msg = 'NetCDFFile %s contains missing values'\
2156                  %(basename_in+'_e.nc')
2157            raise msg
2158        else:
2159            elevations = elevations*(missing==0) + missing*NaN_filler
2160
2161    #######
2162
2163
2164
2165    number_of_times = times.shape[0]
2166    number_of_latitudes = latitudes.shape[0]
2167    number_of_longitudes = longitudes.shape[0]
2168
2169    assert amplitudes.shape[0] == number_of_times
2170    assert amplitudes.shape[1] == number_of_latitudes
2171    assert amplitudes.shape[2] == number_of_longitudes
2172
2173    if verbose:
2174        print '------------------------------------------------'
2175        print 'Statistics:'
2176        print '  Extent (lat/lon):'
2177        print '    lat in [%f, %f], len(lat) == %d'\
2178              %(min(latitudes.flat), max(latitudes.flat),
2179                len(latitudes.flat))
2180        print '    lon in [%f, %f], len(lon) == %d'\
2181              %(min(longitudes.flat), max(longitudes.flat),
2182                len(longitudes.flat))
2183        print '    t in [%f, %f], len(t) == %d'\
2184              %(min(times.flat), max(times.flat), len(times.flat))
2185
2186        q = amplitudes.flat
2187        name = 'Amplitudes (ha) [cm]'
2188        print %s in [%f, %f]' %(name, min(q), max(q))
2189
2190        q = uspeed.flat
2191        name = 'Speeds (ua) [cm/s]'
2192        print %s in [%f, %f]' %(name, min(q), max(q))
2193
2194        q = vspeed.flat
2195        name = 'Speeds (va) [cm/s]'
2196        print %s in [%f, %f]' %(name, min(q), max(q))
2197
2198        q = elevations.flat
2199        name = 'Elevations (e) [m]'
2200        print %s in [%f, %f]' %(name, min(q), max(q))
2201
2202
2203    #print number_of_latitudes, number_of_longitudes
2204    number_of_points = number_of_latitudes*number_of_longitudes
2205    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2206
2207
2208    file_h.close()
2209    file_u.close()
2210    file_v.close()
2211    file_e.close()
2212
2213
2214    # NetCDF file definition
2215    outfile = NetCDFFile(swwname, 'w')
2216
2217    #Create new file
2218    outfile.institution = 'Geoscience Australia'
2219    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2220                          %(basename_in + '_ha.nc',
2221                            basename_in + '_ua.nc',
2222                            basename_in + '_va.nc',
2223                            basename_in + '_e.nc')
2224
2225
2226    #For sww compatibility
2227    outfile.smoothing = 'Yes'
2228    outfile.order = 1
2229
2230    #Start time in seconds since the epoch (midnight 1/1/1970)
2231    outfile.starttime = starttime = times[0]
2232    times = times - starttime  #Store relative times
2233
2234    # dimension definitions
2235    outfile.createDimension('number_of_volumes', number_of_volumes)
2236
2237    outfile.createDimension('number_of_vertices', 3)
2238    outfile.createDimension('number_of_points', number_of_points)
2239
2240
2241    #outfile.createDimension('number_of_timesteps', len(times))
2242    outfile.createDimension('number_of_timesteps', len(times))
2243
2244    # variable definitions
2245    outfile.createVariable('x', precision, ('number_of_points',))
2246    outfile.createVariable('y', precision, ('number_of_points',))
2247    outfile.createVariable('elevation', precision, ('number_of_points',))
2248
2249    #FIXME: Backwards compatibility
2250    outfile.createVariable('z', precision, ('number_of_points',))
2251    #################################
2252
2253    outfile.createVariable('volumes', Int, ('number_of_volumes',
2254                                            'number_of_vertices'))
2255
2256    outfile.createVariable('time', precision,
2257                           ('number_of_timesteps',))
2258
2259    outfile.createVariable('stage', precision,
2260                           ('number_of_timesteps',
2261                            'number_of_points'))
2262
2263    outfile.createVariable('xmomentum', precision,
2264                           ('number_of_timesteps',
2265                            'number_of_points'))
2266
2267    outfile.createVariable('ymomentum', precision,
2268                           ('number_of_timesteps',
2269                            'number_of_points'))
2270
2271
2272    #Store
2273    from coordinate_transforms.redfearn import redfearn
2274    x = zeros(number_of_points, Float)  #Easting
2275    y = zeros(number_of_points, Float)  #Northing
2276
2277
2278    if verbose: print 'Making triangular grid'
2279    #Check zone boundaries
2280    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2281
2282    vertices = {}
2283    i = 0
2284    for k, lat in enumerate(latitudes):       #Y direction
2285        for l, lon in enumerate(longitudes):  #X direction
2286
2287            vertices[l,k] = i
2288
2289            zone, easting, northing = redfearn(lat,lon)
2290
2291            msg = 'Zone boundary crossed at longitude =', lon
2292            #assert zone == refzone, msg
2293            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2294            x[i] = easting
2295            y[i] = northing
2296            i += 1
2297
2298
2299    #Construct 2 triangles per 'rectangular' element
2300    volumes = []
2301    for l in range(number_of_longitudes-1):    #X direction
2302        for k in range(number_of_latitudes-1): #Y direction
2303            v1 = vertices[l,k+1]
2304            v2 = vertices[l,k]
2305            v3 = vertices[l+1,k+1]
2306            v4 = vertices[l+1,k]
2307
2308            volumes.append([v1,v2,v3]) #Upper element
2309            volumes.append([v4,v3,v2]) #Lower element
2310
2311    volumes = array(volumes)
2312
2313    if origin == None:
2314        zone = refzone
2315        xllcorner = min(x)
2316        yllcorner = min(y)
2317    else:
2318        zone = origin[0]
2319        xllcorner = origin[1]
2320        yllcorner = origin[2]
2321
2322
2323    outfile.xllcorner = xllcorner
2324    outfile.yllcorner = yllcorner
2325    outfile.zone = zone
2326
2327
2328    if elevation is not None:
2329        z = elevation
2330    else:
2331        if inverted_bathymetry:
2332            z = -1*elevations
2333        else:
2334            z = elevations
2335        #FIXME: z should be obtained from MOST and passed in here
2336
2337    from Numeric import resize
2338    z = resize(z,outfile.variables['z'][:].shape)
2339    outfile.variables['x'][:] = x - xllcorner
2340    outfile.variables['y'][:] = y - yllcorner
2341    outfile.variables['z'][:] = z             #FIXME HACK
2342    outfile.variables['elevation'][:] = z 
2343    outfile.variables['time'][:] = times   #Store time relative
2344    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2345
2346
2347
2348    #Time stepping
2349    stage = outfile.variables['stage']
2350    xmomentum = outfile.variables['xmomentum']
2351    ymomentum = outfile.variables['ymomentum']
2352
2353    if verbose: print 'Converting quantities'
2354    n = len(times)
2355    for j in range(n):
2356        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2357        i = 0
2358        for k in range(number_of_latitudes):      #Y direction
2359            for l in range(number_of_longitudes): #X direction
2360                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2361                stage[j,i] = w
2362                h = w - z[i]
2363                xmomentum[j,i] = uspeed[j,k,l]/100*h
2364                ymomentum[j,i] = vspeed[j,k,l]/100*h
2365                i += 1
2366
2367    #outfile.close()               
2368
2369    #FIXME: Refactor using code from file_function.statistics
2370    #Something like print swwstats(swwname)
2371    if verbose:
2372        x = outfile.variables['x'][:]
2373        y = outfile.variables['y'][:]
2374        times = outfile.variables['time'][:]
2375        print '------------------------------------------------'
2376        print 'Statistics of output file:'
2377        print '  Name: %s' %swwname
2378        print '  Reference:'
2379        print '    Lower left corner: [%f, %f]'\
2380              %(xllcorner, yllcorner)
2381        print '    Start time: %f' %starttime
2382        print '  Extent:'
2383        print '    x [m] in [%f, %f], len(x) == %d'\
2384              %(min(x.flat), max(x.flat), len(x.flat))
2385        print '    y [m] in [%f, %f], len(y) == %d'\
2386              %(min(y.flat), max(y.flat), len(y.flat))
2387        print '    t [s] in [%f, %f], len(t) == %d'\
2388              %(min(times), max(times), len(times))
2389        print '  Quantities [SI units]:'
2390        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2391            q = outfile.variables[name][:].flat
2392            print '    %s in [%f, %f]' %(name, min(q), max(q))
2393
2394
2395
2396    outfile.close()               
2397
2398
2399
2400
2401
def timefile2netcdf(filename, quantity_names = None):
    """Template for converting typical text files with time series to
    NetCDF tms file.


    The file format is assumed to be two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname; the input is read from
    filename + '.txt' and the result written to filename + '.tms'

    quantity_names: optional sequence of names, one per value column.
    Columns without a name are stored as 'Attribute<i>'.
    """

    import time, calendar
    from Numeric import zeros, Float, alltrue
    from config import time_format
    from util import ensure_numeric


    #Parse the first line to establish the start time and the
    #number of value columns
    fid = open(filename + '.txt')
    line = fid.readline()
    fid.close()

    fields = line.split(',')
    #Bug fix: the filename was never substituted into the message
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg

    try:
        starttime = calendar.timegm(time.strptime(fields[0], time_format))
    except ValueError:
        msg = 'First field in file %s must be' %filename
        msg += ' date-time with format %s.\n' %time_format
        msg += 'I got %s instead.' %fields[0]
        raise msg


    #Split values
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg



    #Read times proper
    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    N = len(lines)
    d = len(q)

    T = zeros(N, Float)       #Time relative to starttime
    Q = zeros((N, d), Float)  #Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        realtime = calendar.timegm(time.strptime(fields[0], time_format))

        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonically ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg


    #Create NetCDF file
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(filename + '.tms', 'w')


    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'


    #Reference point
    #Start time in seconds since the epoch (midnight 1/1/1970)
    #FIXME: Use Georef
    fid.starttime = starttime


    # dimension definitions
    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', Float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        #Fall back to a generated name when quantity_names is None
        #(TypeError) or too short (IndexError) - narrowed from bare except
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            name = 'Attribute%d'%i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()
2522
2523
def extent_sww(file_name):
    """
    Read in an sww file.

    Input;
    file_name - the sww file

    Output;
    A list [xmin, xmax, ymin, ymax, stage_min, stage_max] giving the
    spatial extent of the mesh and the overall range of the stored
    stage values across all timesteps.
    """

    from Scientific.IO.NetCDF import NetCDFFile

    #Open the file and pull out the variables we need
    fid = NetCDFFile(file_name, 'r')

    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stages = fid.variables['stage'][:]

    fid.close()

    #stage is 2D (time x points) so flatten before taking extrema
    return [min(xs), max(xs),
            min(ys), max(ys),
            min(stages.flat), max(stages.flat)]
2558
2559
def sww2domain(filename,boundary=None,t=None,\
               fail_if_NaN=True,NaN_filler=0\
               ,verbose = True,very_verbose = False):
    """
    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    Reconstruct a shallow_water Domain from an sww file. Time-dependent
    quantities are interpolated to model time t; static quantities are
    copied directly.

    filename: name of the sww file (full name including extension)
    boundary: optional boundary dictionary to attach to the domain
    t: model time to extract (default: last stored timestep)
    fail_if_NaN: if True, raise when a quantity contains its
        missing_value; otherwise substitute NaN_filler
    verbose/very_verbose: progress/diagnostic printing

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.
    """
    NaN=9.969209968386869e+036
    #initialise NaN: default netCDF fill value; overridden per quantity
    #below by the variable's own missing_value attribute where present.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
    time = fid.variables['time']       #Timesteps
    if t is None:
        t = time[-1]
    #(index, ratio) pair used to blend between adjacent stored timesteps
    time_interp = get_time_interp(time,t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]             #x-coordinates of vertices
    y = fid.variables['y'][:]             #y-coordinates of vertices
    #NOTE(review): the four handles below are never used later - the
    #quantities are re-read generically in the loop over fid.variables
    elevation = fid.variables['elevation']     #Elevation
    stage = fid.variables['stage']     #Water level
    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    coordinates=transpose(asarray([x.tolist(),y.tolist()]))  #Nx2 points

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    if verbose: print '    getting quantities'
    #Any variable carrying the time dimension is treated as a conserved
    #quantity (interpolated to t); everything else is static.
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity]=\
                  interpolated_quantity(fid.variables[quantity][:],time_interp)
        else: other_quantities.append(quantity)

    #Coordinates, connectivity and the redundant 'z' copy of elevation are
    #handled separately; 'time' is the axis, not a quantity.
    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
#    From domain.Domain:
#    domain = Domain(coordinates, volumes,\
#                    conserved_quantities = conserved_quantities,\
#                    other_quantities = other_quantities,zone=zone,\
#                    xllcorner=xllcorner, yllcorner=yllcorner)

#   From shallow_water.Domain:
    coordinates=coordinates.tolist()
    volumes=volumes.tolist()
    #FIXME:should this be in mesh?(peter row)
    #Smoothed files store one value per unique vertex; non-smoothed files
    #store three values per triangle, so duplicates must be weeded out.
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)


    domain = Domain(coordinates, volumes, boundary)

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    #Shift the domain's start time so its clock begins at the extracted t
    domain.starttime=float(starttime)+float(t)
    domain.time=0.0

    for quantity in other_quantities:
        try:
            #Use this variable's own missing_value if it declares one;
            #otherwise NaN keeps its previous value
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                #Zero out missing entries and substitute the filler value
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Regroup flat per-vertex values into (triangle, 3) layout
            X = resize(X,(len(X)/3,3))
        domain.set_quantity(quantity,X)
#
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            X = resize(X,(X.shape[0]/3,3))
        domain.set_quantity(quantity,X)
    fid.close()
    return domain
2702
def interpolated_quantity(saved_quantity, time_interp):
    """Linearly interpolate a time-indexed quantity.

    time_interp is an (index, ratio) pair as produced by get_time_interp.
    With ratio == 0 the stored value at index is returned unchanged;
    otherwise the values at index and index+1 are blended by ratio.
    """
    idx, frac = time_interp
    if frac > 0:
        #Blend between the two bracketing timesteps
        return (1 - frac)*saved_quantity[idx] + frac*saved_quantity[idx + 1]
    #Exact hit on a stored timestep
    return saved_quantity[idx]
2714
def get_time_interp(time, t=None):
    """Locate model time t within the stored time vector.

    Returns an (index, ratio) pair: ratio is the fractional position of t
    between time[index] and time[index+1], or 0 when t coincides with a
    stored timestep.  With t omitted the last timestep is selected.
    Borrowed from previous pyvolution code.
    """
    if t is None:
        #Default to the last stored timestep, exactly
        return (-1, 0.)

    msg = 'Time interval derived from file %s [%s:%s]'\
        %('FIXMEfilename', time[0], time[-1])
    msg += ' does not match model time: %s' %t
    if t < time[0]: raise msg
    if t > time[-1]: raise msg

    #Walk to the first timestep >= t, then back off if we overshot
    index = 0
    while t > time[index]: index += 1
    while t < time[index]: index -= 1

    if t == time[index]:
        #Protect against case where t == time[-1] (last time)
        # - also works in general when t == time[i]
        ratio = 0
    else:
        #t lies strictly between index and index+1
        ratio = (t - time[index])/(time[index+1] - time[index])

    return (index, ratio)
2741
2742
def weed(coordinates, volumes, boundary = None):
    """Remove duplicated vertices from a mesh.

    coordinates: list (or Numeric array) of [x, y] points, possibly with
        repeats.
    volumes: list (or Numeric array) of triangles given as triples of
        indices into coordinates.  Remapped in place as well as returned.
    boundary: optional dictionary mapping (index0, index1) segments to
        tags; indices are remapped to the weeded numbering.

    Returns (coordinates, volumes, boundary) where coordinates contains
    only unique points and all indices refer to the weeded list.
    """
    #Bug fix: the original test 'type(coordinates)=="array"' compared a
    #type object with a string, which is always False, so arrays were
    #never converted.  Use duck typing instead.
    if not isinstance(coordinates, list):
        coordinates = coordinates.tolist()
    if not isinstance(volumes, list):
        volumes = volumes.tolist()

    point_dict = {}   #Maps point tuple -> index of its first occurrence
    same_point = {}   #Maps every original index -> its point tuple
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point in point_dict:
            #Duplicate: all references to i will be redirected via
            #same_point to the first occurrence
            same_point[i] = point
        else:
            point_dict[point] = i
            same_point[i] = point

    #Rebuild the coordinate list from the unique points and renumber them
    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    #Remap triangle vertex indices to the weeded numbering
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    new_boundary = {}
    if not boundary is None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)

            if (point0, point1) in new_boundary:
                new_boundary[(point0, point1)] = new_boundary[(point0, point1)]#\
                                              #+','+label

            elif (point1, point0) in new_boundary:
                new_boundary[(point1, point0)] = new_boundary[(point1, point0)]#\
                                              #+','+label
            else:
                new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
2798
2799
2800
2801
2802
2803
2804
2805
2806
2807
2808###############################################
2809#OBSOLETE STUFF
2810#Native checkpoint format.
2811#Information needed to recreate a state is preserved
2812#FIXME: Rethink and maybe use netcdf format
def cpt_variable_writer(filename, t, v0, v1, v2):
    """Store all conserved quantities to file.

    Writes one line per triangle: the N values of v0, then v1, then v2,
    each in full double precision.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)

    fid = open(FN, 'w')
    for row in range(M):
        #v0, v1 and v2 values are concatenated on a single line
        for quantity in (v0, v1, v2):
            for col in range(N):
                fid.write('%.16e ' %quantity[row,col])
        fid.write('\n')
    fid.close()
2833
2834
def cpt_variable_reader(filename, t, v0, v1, v2):
    """Read all conserved quantities for time t from a cpt file into the
    supplied arrays v0, v1 and v2 (filled in place).

    NOTE(review): the fixed offsets 3 and 6 assume N == 3 values per
    quantity, matching the layout written by cpt_variable_writer.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)

    fid = open(FN)

    for row in range(M):
        #One line per triangle: v0 values, then v1, then v2
        values = fid.readline().split()

        for col in range(N):
            v0[row,col] = float(values[col])
            v1[row,col] = float(values[3+col])
            v2[row,col] = float(values[6+col])

    fid.close()
2856
def cpt_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z,z,z coordinates of triangles constituting the bed
    elevation.

    For each of the M triangles, writes per vertex: the two coordinates
    from X0/X1/X2 followed by the N values from v0/v1/v2, one line per
    triangle, all in '%.16e' format.

    FIXME: Not in use pt

    Note: removed leftover debug code ('print X0' followed by an
    unconditional sys.exit()) that aborted the interpreter before any
    output was written.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        for X, v in ((X0, v0), (X1, v1), (X2, v2)):
            for j in range(2):
                fid.write('%.16e ' %X[i,j])   #x, y
            for j in range(N):
                fid.write('%.16e ' %v[i,j])   #z,z,z,

        fid.write('\n')
    fid.close()
2890
2891
2892
2893#Function for storing out to e.g. visualisation
2894#FIXME: Do we want this?
2895#FIXME: Not done yet for this version
def dat_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z coordinates of triangles constituting the bed elevation.

    One line per triangle: for each of the three vertices, its x and y
    coordinates followed by the first component of the corresponding
    quantity array, all in '%f' format.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        for X, v in ((X0, v0), (X1, v1), (X2, v2)):
            fid.write('%f %f ' %(X[i,0], X[i,1]))   #x, y
            fid.write('%f ' %v[i,0])                #z

        fid.write('\n')
    fid.close()
2921
2922
2923
def dat_variable_writer(filename, t, v0, v1, v2):
    """Store water height to file

    One line per triangle: first component of v0, v1 and v2 at time t,
    in '%.4f' format.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M, t)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        fid.write('%.4f %.4f %.4f \n' %(v0[i,0], v1[i,0], v2[i,0]))
    fid.close()
2941
2942
def read_sww(filename):
    """Read sww Net CDF file containing Shallow Water Wave simulation

    The integer array volumes is of shape Nx3 where N is the number of
    triangles in the mesh.

    Each entry in volumes is an index into the x,y arrays (the location).

    Quantities stage, elevation, xmomentum and ymomentum are all in arrays of dimensions
    number_of_timesteps, number_of_points.

    The momentum is not always stored.

    NOTE(review): the variables read below are bound to locals but never
    returned or used, so this function currently has no observable effect
    other than opening the file -- confirm whether a return value was
    intended.
    """
    from Scientific.IO.NetCDF import NetCDFFile
    print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
#latitude, longitude
    # Get the variables as Numeric arrays
    x = fid.variables['x']             #x-coordinates of vertices
    y = fid.variables['y']             #y-coordinates of vertices
    z = fid.variables['elevation']     #Elevation
    time = fid.variables['time']       #Timesteps
    stage = fid.variables['stage']     #Water level
    #xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    #ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    volumes = fid.variables['volumes'] #Connectivity
2971
2972
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.

    Parameters:
        basename_in: stem of the input file; '<basename_in>.dem' is read.
        stencil: 2D weight array; each decimated cell is the sum of the
            input cells under the stencil multiplied by the stencil
            (weights presumably sum to 1 -- TODO confirm with callers).
        cellsize_new: cell size of the output grid; assumed to be an
            integer multiple of the input cellsize (the ratio is truncated
            with int()).
        basename_out: optional output stem; defaults to
            '<basename_in>_<cellsize_new>.dem'.
        verbose: print progress information.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out == None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    #Note: Python 2 integer division -- offsets are the (floored) centre
    #of the stencil, used below to shift the lower-left corner.
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                           'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    #Lower-left corner moves inward by the stencil half-width
    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data: one decimated row at a time, written as a flat slice
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev =  zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
3099
3100
3101   
def sww2asc_obsolete(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            verbose = False,
            origin = None):
    """Read SWW file and convert to Digitial Elevation model format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    if quantity is given, out values from quantity otherwise default to
    elevation

    if timestep (an index) is given, output quantity at that timestep

    if reduction is given use that to reduce quantity over all timesteps.

    NOTE(review): the 'timestep' parameter is accepted and documented but
    never referenced in the body -- the reduction path is always taken for
    time-varying quantities.
    """
    from Numeric import array, Float, concatenate, NewAxis, zeros,\
         sometrue
    from utilities.polygon import inside_polygon

    #FIXME: Should be variable
    datum = 'WGS84'
    false_easting = 500000
    false_northing = 10000000

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    ascfile = basename_out + '.asc'
    prjfile = basename_out + '.prj'


    if verbose: print 'Reading from %s' %swwfile
    #Read sww file
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    ymin = min(y); ymax = max(y)
    xmin = min(x); xmax = max(x)

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        #Caller supplied georeference as (zone, xllcorner, yllcorner)
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]


    #Get quantity and reduce if applicable
    if verbose: print 'Reading quantity %s' %quantity

    if quantity.lower() == 'depth':
        #Depth is derived: stage minus elevation
        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
    else:
        q = fid.variables[quantity][:]


    #Time-varying quantities have shape (timesteps, points); collapse the
    #time axis with the supplied reduction (default: max over time)
    if len(q.shape) == 2:
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    #Now q has dimension: number_of_points

    #Create grid and update xll/yll corner and x,y
    if verbose: print 'Creating grid'
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1

    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    #Shift vertex coordinates so they are relative to the new corner
    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2


    from Numeric import zeros, Float
    grid_points = zeros ( (ncols*nrows, 2), Float )

    #Regular grid points in row-major order, spaced by cellsize
    for i in xrange(nrows):
        yg = i*cellsize
        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

            grid_points[k,0] = xg
            grid_points[k,1] = yg

    #Interpolate
    from least_squares import Interpolation


    #FIXME: This should be done with precrop = True, otherwise it'll
    #take forever. With expand_search  set to False, some grid points might
    #miss out....

    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
                           precrop = False, expand_search = True,
                           verbose = verbose)

    #Interpolate using quantity values
    if verbose: print 'Interpolating'
    grid_values = interp.interpolate(q).flat

    #Write
    #Write prj file
    if verbose: print 'Writing %s' %prjfile
    prjid = open(prjfile, 'w')
    prjid.write('Projection    %s\n' %'UTM')
    prjid.write('Zone          %d\n' %zone)
    prjid.write('Datum         %s\n' %datum)
    prjid.write('Zunits        NO\n')
    prjid.write('Units         METERS\n')
    prjid.write('Spheroid      %s\n' %datum)
    prjid.write('Xshift        %d\n' %false_easting)
    prjid.write('Yshift        %d\n' %false_northing)
    prjid.write('Parameters\n')
    prjid.close()



    if verbose: print 'Writing %s' %ascfile
    NODATA_value = -9999

    ascid = open(ascfile, 'w')

    ascid.write('ncols         %d\n' %ncols)
    ascid.write('nrows         %d\n' %nrows)
    ascid.write('xllcorner     %d\n' %newxllcorner)
    ascid.write('yllcorner     %d\n' %newyllcorner)
    ascid.write('cellsize      %f\n' %cellsize)
    ascid.write('NODATA_value  %d\n' %NODATA_value)


    #Get bounding polygon from mesh; grid points outside it get NODATA
    P = interp.mesh.get_boundary_polygon()
    inside_indices = inside_polygon(grid_points, P)

    #ASC rows run north to south, hence the (nrows-i-1) flip
    for i in range(nrows):
        if verbose and i%((nrows+10)/10)==0:
            print 'Doing row %d of %d' %(i, nrows)

        for j in range(ncols):
            index = (nrows-i-1)*ncols+j

            if sometrue(inside_indices == index):
                ascid.write('%f ' %grid_values[index])
            else:
                ascid.write('%d ' %NODATA_value)

        ascid.write('\n')

    #Close
    ascid.close()
    fid.close()
3316
def asc_csiro2sww(bath_dir,
                  elevation_dir,
                  ucur_dir,
                  vcur_dir,
                  sww_file,
                  minlat = None, maxlat = None,
                  minlon = None, maxlon = None,
                  zscale=1,
                  mean_stage = 0,
                  fail_on_NaN = True,
                  elevation_NaN_filler = 0,
                  bath_prefix='ba',
                  elevation_prefix='el',
                  verbose=False):
    """Convert CSIRO-style directories of esri asc grids into one sww file.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    assume:
    All files are in ersi asc format

    4 types of information
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions
    The metadata of all the files is the same
    Each type is in a seperate directory
    One bath file with extention .000
    The time period is less than 24hrs and uniform.

    Parameters:
        bath_dir, elevation_dir, ucur_dir, vcur_dir: directories holding
            the bathymetry, elevation, u-velocity and v-velocity grids.
        sww_file: name of the NetCDF sww file to create.
        minlat/maxlat/minlon/maxlon: optional window to cut out.
        zscale, mean_stage: stage is computed as zscale*elevation + mean_stage.
        fail_on_NaN: raise on NODATA values in elevation files; otherwise
            fill them with elevation_NaN_filler.
        bath_prefix, elevation_prefix: filename prefixes (bath_prefix is
            currently unused -- the first file in bath_dir is taken).
        verbose: print progress information.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from coordinate_transforms.redfearn import redfearn

    precision = Float # So if we want to change the precision its done here

    # go in to the bath dir and load the only file,
    bath_files = os.listdir(bath_dir)
    #print "bath_files",bath_files

    #fixme: make more general?
    bath_file = bath_files[0]
    bath_dir_file =  bath_dir + os.sep + bath_file
    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
    #print "bath_metadata",bath_metadata
    #print "bath_grid",bath_grid

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    base_start = bath_file[-12:]

    #go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
                         + base_start
    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
    #print "elevation_dir_file",elevation_dir_file
    #print "elevation_grid", elevation_grid

    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)

    # the first elevation file should be the
    # file with the same base name as the bath data
    #print "elevation_files[0]",elevation_files[0]
    #print "'el' + base_start",'el' + base_start
    assert elevation_files[0] == 'el' + base_start

    #assert bath_metadata == elevation_metadata



    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    #Reconstruct the lat/lon axes from the asc header
    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
                  for x in range(number_of_longitudes)]
    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
                 for y in range(number_of_latitudes)]

     # reverse order of lat, so the fist lat represents the first grid row
    latitudes.reverse()

    #print "latitudes - before get_min_max_indexes",latitudes
    kmin, kmax, lmin, lmax = get_min_max_indexes(latitudes,longitudes,
                                                 minlat=minlat, maxlat=maxlat,
                                                 minlon=minlon, maxlon=maxlon)


    #Cut the grid and axes down to the requested window
    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
    #print "bath_grid",bath_grid
    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
    #print "number_of_points",number_of_points

    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)


    #Work out the times
    if len(elevation_files) > 1:
        # Assume: The time period is less than 24hrs.
        # File names end in an hour stamp; convert the spacing to seconds.
        time_period = (int(elevation_files[1][-3:]) - \
                      int(elevation_files[0][-3:]))*60*60
        times = [x*time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]
    #print "times", times
    #print "number_of_latitudes", number_of_latitudes
    #print "number_of_longitudes", number_of_longitudes
    #print "number_of_times", number_of_times
    #print "latitudes", latitudes
    #print "longitudes", longitudes


    if verbose:
        print '------------------------------------------------'
        print 'Statistics:'
        print '  Extent (lat/lon):'
        print '    lat in [%f, %f], len(lat) == %d'\
              %(min(latitudes), max(latitudes),
                len(latitudes))
        print '    lon in [%f, %f], len(lon) == %d'\
              %(min(longitudes), max(longitudes),
                len(longitudes))
        print '    t in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))

    ######### WRITE THE SWW FILE #############
    # NetCDF file definition
    outfile = NetCDFFile(sww_file, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'


    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1


    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)

    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    #Store
    from coordinate_transforms.redfearn import redfearn
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print 'Making triangular grid'
    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0],longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat,lon)

            msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1


    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            volumes.append([v1,v2,v3]) #Upper element
            volumes.append([v4,v3,v2]) #Lower element

    volumes = array(volumes)

    geo_ref = Geo_reference(refzone,min(x),min(y))
    geo_ref.write_NetCDF(outfile)

    z = resize(bath_grid,outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

    # do this to create an ok sww file.
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z
    # put the next line up in the code after outfile.order = 1
    #number_of_times = 1

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
           _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])

        #print "elevation_grid",elevation_grid
        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
        #print "elevation_grid",elevation_grid
        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue (missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                #NOTE(review): raising a string is Python 2 only and
                #deprecated even there; should raise an exception object.
                raise msg
            else:
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler


        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                #Stage from scaled elevation; momenta are velocity * depth
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()
3606   
def get_min_max_indexes(latitudes,longitudes,
                        minlat=None, maxlat=None,
                        minlon=None, maxlon=None):
    """
    return max, min indexes (for slicing) of the lat and long arrays to cover the area
    specified with min/max lat/long

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    inputed max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    assume latitudess & longitudes are sorted,
    long - from low to high (west to east, eg 148 - 151)
    lat - from high to low (north to south, eg -35 - -38)
    """

    # searchsorted needs ascending input, so work on a reversed copy of
    # the (descending) latitudes; the caller's list is left untouched.
    ascending_lats = latitudes[::-1]
    top = len(ascending_lats) - 1

    # Lower latitude bound, clamped to the start of the axis
    if minlat is None:
        asc_min = 0
    else:
        asc_min = searchsorted(ascending_lats, minlat) - 1
        if asc_min < 0:
            asc_min = 0

    # Upper latitude bound, clamped to the end of the axis
    if maxlat is None:
        asc_max = top
    else:
        asc_max = searchsorted(ascending_lats, maxlat)
        if asc_max > top:
            asc_max = top

    # Longitudes are already ascending
    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon) - 1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)

    # Map the ascending-order bounds back onto the original
    # (north-to-south) ordering and widen the upper ends by one so the
    # returned pairs work directly as slice limits.
    lat_min_index = top - asc_max
    lat_max_index = top - asc_min + 1
    lon_max_index = lon_max_index + 1

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3663       
3664   
3665def _read_asc(filename, verbose=False):
3666    """Read esri file from the following ASCII format (.asc)
3667
3668    Example:
3669
3670    ncols         3121
3671    nrows         1800
3672    xllcorner     722000
3673    yllcorner     5893000
3674    cellsize      25
3675    NODATA_value  -9999
3676    138.3698 137.4194 136.5062 135.5558 ..........
3677
3678    """
3679
3680    datafile = open(filename)
3681
3682    if verbose: print 'Reading DEM from %s' %(filename)
3683    lines = datafile.readlines()
3684    datafile.close()
3685
3686    if verbose: print 'Got', len(lines), ' lines'
3687
3688    ncols = int(lines.pop(0).split()[1].strip())
3689    nrows = int(lines.pop(0).split()[1].strip())
3690    xllcorner = float(lines.pop(0).split()[1].strip())
3691    yllcorner = float(lines.pop(0).split()[1].strip())
3692    cellsize = float(lines.pop(0).split()[1].strip())
3693    NODATA_value = float(lines.pop(0).split()[1].strip())
3694   
3695    assert len(lines) == nrows
3696   
3697    #Store data
3698    grid = []
3699   
3700    n = len(lines)
3701    for i, line in enumerate(lines):
3702        cells = line.split()
3703        assert len(cells) == ncols
3704        grid.append(array([float(x) for x in cells]))
3705    grid = array(grid)
3706
3707    return {'xllcorner':xllcorner,
3708            'yllcorner':yllcorner,
3709            'cellsize':cellsize,
3710            'NODATA_value':NODATA_value}, grid
3711   
Note: See TracBrowser for help on using the repository browser.