source: inundation/pyvolution/data_manager.py @ 2718

Last change on this file since 2718 was 2718, checked in by steve, 18 years ago
File size: 112.0 KB
Line 
1"""datamanager.py - input output for AnuGA
2
3
This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
.geo: Houdini's ASCII geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
NC -> SWW: Conversion of MOST boundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
TSH + Boundary SWW -> SWW: Simulation using pyvolution
48
49"""
50
import exceptions


# Exception hierarchy used throughout the data manager.  All classes
# derive from the standard Exception so callers may catch them
# individually or as a group.

class DataMissingValuesError(exceptions.Exception):
    """Error involving missing or invalid values in a data set."""
    pass


class DataFileNotOpenError(exceptions.Exception):
    """Raised when a data file could not be opened (e.g. for append)."""
    pass


class DataTimeError(exceptions.Exception):
    """Raised when time information in a data file or name is invalid."""
    pass


class DataDomainError(exceptions.Exception):
    """Error involving inconsistent domain information."""
    pass
56
57
58
59import os
60
61from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
62     searchsorted, zeros, allclose, around
63
64
65from coordinate_transforms.geo_reference import Geo_reference
66
67
def make_filename(s):
    """Transform argument string into a suitable filename.

    Strips surrounding whitespace, replaces internal spaces with
    underscores, drops parentheses and collapses double underscores.
    """

    result = s.strip()

    # Apply the substitutions in order; collapsing '__' must come last
    # so underscores introduced by the earlier steps are also collapsed.
    for old, new in ((' ', '_'), ('(', ''), (')', ''), ('__', '_')):
        result = result.replace(old, new)

    return result
79
80
def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       checkdir(path, verbose):

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator
        (the empty string is returned unchanged -- it cannot name a
        directory and indexing it would fail)
    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1

    # Guard against the empty string before indexing path[-1]
    # (the old code crashed with IndexError here).
    if path == '':
        return path

    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname

    # FIX: the access test used 'os.R_OK and os.W_OK' which evaluates
    # to just os.W_OK; combine the mode bits with bitwise OR instead.
    if not os.access(path, os.R_OK | os.W_OK):
        try:
            os.mkdir(path)

            # Change access rights if possible.
            # FIX: use os.chmod instead of building a shell command
            # (os.system('chmod ...') would break on paths with spaces).
            if unix:
                os.chmod(path, 0o775)

            if verbose: print('MESSAGE: Directory %s created.' % path)

        except OSError:
            # Fall back to a directory that should always be writable
            print('WARNING: Directory %s could not be created.' % path)
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print('Using directory %s instead' % path)

    return path
133
134
135
def del_dir(path):
    """Recursively delete directory path and all its contents.

    Files that cannot be removed are reported and skipped (best effort);
    the final rmdir will then fail naturally if anything remains.
    """

    import os

    if os.path.isdir(path):
        for entry in os.listdir(path):
            child = os.path.join(path, entry)

            # Recurse into real subdirectories; treat symlinks as files
            # so we never follow a link out of the tree.
            if os.path.isdir(child) and not os.path.islink(child):
                del_dir(child)
            else:
                try:
                    os.remove(child)
                except OSError:
                    # FIX: narrowed from a bare except; removal stays
                    # best-effort but no longer hides unrelated errors.
                    print('Could not remove file %s' % child)

        os.rmdir(path)
156
157
158
def create_filename(datadir, filename, format, size=None, time=None):
    """Build a data file name inside datadir.

    Optional size and time components are appended as '_size<N>' and
    '_time<T>' (two decimals) before the '.<format>' extension.
    The directory is verified/created via check_dir.
    """

    import os

    parts = [check_dir(datadir) + filename]

    if size is not None:
        parts.append('_size%d' % size)

    if time is not None:
        parts.append('_time%.2f' % time)

    parts.append('.' + format)

    return ''.join(parts)
174
175
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format

    Returns the glob matches for files produced by create_filename
    with the given size.
    """

    import glob

    import os

    dir = check_dir(datadir)  # already ends with os.sep

    # FIX: create_filename writes '<name>_size<N>...', but this searched
    # for '<name>_size=<N>...' (and inserted a second os.sep), so no
    # generated file could ever match.
    pattern = dir + filename + '_size%d*.%s' % (size, format)
    return glob.glob(pattern)
189
190
191
192#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats

    Base class for the concrete writers in this module
    (Data_format_sww, Data_format_cpt, Data_format_xya).  Derives the
    output filename from the domain's data directory, name and the
    given extension, and records basic bookkeeping state.
    """


    def __init__(self, domain, extension, mode = 'w'):
        # mode uses the same letters as open(): read/write/append
        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
                                        '''   'w' (write)'''+\
                                        '''   'r' (read)''' +\
                                        '''   'a' (append)'''

        #Create filename
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))


        self.filename = create_filename(domain.get_datadir(),
            domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0                       # count of stored timesteps
        self.number_of_volumes = len(domain)    # presumably the number of triangles -- confirm with domain.__len__
        self.domain = domain


        #FIXME: Should we have a general set_precision function?
219
220
221
222#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        """Create (mode 'w') or open an sww file for the given domain.

        max_size bounds the file size; when exceeded, store_timestep
        starts a new file (see below).  recursion tracks how many such
        continuation files have been created.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
        #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            # With smoothing, vertices shared between triangles are
            # stored once; otherwise each triangle keeps its own three.
            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        # Elevation is duplicated into the legacy 'z' variable so older
        # readers keep working.
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file

        names is one quantity name or a list/tuple of names
        ('stage', 'xmomentum', 'ymomentum').  If the file would exceed
        max_size, a new sww file is started (recursively) and installed
        as the domain's writer.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        #minimum_allowed_depth = 0.001
        minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF.  Retry for a while in case a reader has the file open.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]          # st_size in bytes
        file_size_increase =  file_size/i           # average bytes per stored step
        # NOTE(review): recursion starts as False (== 0), so the budget
        # factor 2**recursion is 1 for the first file and doubles for
        # each continuation file.
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break swollens reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    # Clamp stage to the bed where depth is below the
                    # minimum allowed depth (dry cells).
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring


            #Flush and close
            fid.sync()
            fid.close()
492
493
494
495#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt) for checkpoint data

    The file stores the mesh (x, y, volumes) plus one row per stored
    timestep for every quantity defined on the domain.
    """


    def __init__(self, domain, mode = 'w'):
        """Create (mode 'w') or open the checkpoint file for domain."""
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        #FIX: extension used to be 'sww', which made checkpoint files
        #share the name of (and clobber) the model output file.
        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y vertex coordinates and the connectivity (volumes)
        of the mesh to the checkpoint file.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and connectivity V (bed elevation Z is returned too
        # but is checkpointed like any other quantity via store_timestep)
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)

        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #FIX: a previous version assigned Z to an undefined variable z
        #here (NameError) - the cpt file has no mesh elevation variable,
        #so nothing is written for Z.

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store current domain time and the named quantity to file."""
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF.  Retry for a while in case a reader has the file open.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  %self.filename
                msg += ' - trying again'
                print(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            #FIX: was misspelt 'DataFileNotOPenError', which raised a
            #NameError instead of the intended exception.
            raise DataFileNotOpenError(msg)


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        #FIX: values were always written into the 'stage' variable
        #regardless of name; write into the variable matching 'name'
        #(one was created per quantity in __init__).
        var = fid.variables[name]
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        var[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()
640
641
642
643
644
645#Function for storing xya output
646#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface to the ASCII xya format.

    Writes one line per vertex: x, y and three attributes
    (bed elevation, stage, friction).
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        # With smoothing, shared vertices are stored once; otherwise
        # each triangle contributes its own three vertices.
        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        # Header: point count, attribute count, column legend
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        # map(None, ...) is the Python 2 idiom for zipping sequences,
        # padding with None if lengths differ.
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file

        Not implemented for the xya format.
        """
        pass
716
717
718#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)

       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,
       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

       If filename does not already end in '.obj' the extension
       is appended.
    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    #FIX: open in text mode ('w', was 'wb') since only text is written,
    #and guarantee the handle is closed even if a write fails.
    outfile = open(FN, 'w')
    try:
        outfile.write("# Triangulation as an obj file\n")

        M, N = x.shape
        assert N==3  #Assuming three vertices per element

        #One vertex line per vertex of every (disconnected) triangle
        for i in range(M):
            for j in range(N):
                outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

        #Face lines; obj vertex indices are 1-based
        for i in range(M):
            base = i*N
            outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))
    finally:
        outfile.close()
756
757
758#########################################################
759#Conversion routines
760########################################################
761
def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj

    Reads the sww file named by create_filename('.', basefilename,
    'sww', size), writes one obj file for the bathymetry and one obj
    file per stored timestep (with stage as the z coordinate).
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    # Flat point arrays are laid out vertex-major: point i of vertex j
    # lives at index i + j*M.
    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        # NOTE(review): basefilename is truncated to 5 characters here,
        # presumably to keep per-timestep names short - confirm intent.
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)
814
815
def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?

    Reads <data_dir>/<basefilename>_geometry.dat for the mesh
    (9 numbers per line: x,y,z for three vertices) and every
    <basefilename>*_time=<t>.dat file for per-vertex values, writing
    one obj file per timestep via write_obj.
    """

    import glob, os
    from config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        # Each line holds x,y,z for three vertices
        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print('Processing %s' % filename)

        #FIX: glob already returns paths that include data_dir; the old
        #code prepended data_dir a second time and could not open any
        #of the matched files.
        lines = open(filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError('Hmmmm')


        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            # One value per vertex of triangle i
            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
893
894
def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2

    All dimensions and variables are copied; 'time' and 'stage' are
    subsampled to the selected timesteps, every other variable is
    copied in full.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    # Declare all variables with identical type and dimensions
    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass  # handled below, timestep by timestep
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
942
943
944#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename

    The writer class is resolved by name ('Data_format_<format>') in
    this module's namespace.  FIX: this replaces eval(), which would
    execute any expression placed in domain.format.
    Raises KeyError if no such writer class exists.
    """

    cls = globals()['Data_format_%s' % domain.format]
    return cls(domain, mode)
951
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed;
    'stride' keeps every stride-th line.

    If z_func is specified, it will be applied to the third field
    """

    import os
    # FIX: removed unused 'from Numeric import Float, arrayrange,
    # concatenate' - none of those names were referenced.

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Open existing xya file for read
    infile = open(root + ext, 'r')

    if verbose: print('Reading xya points from %s' % (root + ext))

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        # Warn about malformed lines but still attempt the conversion
        # below (a line with extra fields may still yield a point).
        if len(fields) != 3:
            print('WARNING: Line %d doesn\'t have 3 elements: %s' % (i, line))

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except (ValueError, IndexError):
            #Skip lines without three leading numeric values
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)

    infile.close()  # FIX: the input file was never closed


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print('Store to NetCDF file %s' % ptsname)
    write_ptsfile(ptsname, points, attribute, attribute_name)
1027
1028
1029
1030######Obsoleted by export_points in load_mesh
1031def write_ptsfile(ptsname, points, attribute, attribute_name = None,
1032                  zone=None, xllcorner=None, yllcorner=None):
1033    """Write points and associated attribute to pts (NetCDF) format
1034    """
1035
1036    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'
1037
1038    from Numeric import Float
1039
1040    if attribute_name is None:
1041        attribute_name = 'attribute'
1042
1043
1044    from Scientific.IO.NetCDF import NetCDFFile
1045
1046    # NetCDF file definition
1047    outfile = NetCDFFile(ptsname, 'w')
1048
1049
1050    #Create new file
1051    outfile.institution = 'Geoscience Australia'
1052    outfile.description = 'NetCDF pts format for compact and '\
1053                          'portable storage of spatial point data'
1054
1055
1056    #Georeferencing
1057    from coordinate_transforms.geo_reference import Geo_reference
1058    if zone is None:
1059        assert xllcorner is None, 'xllcorner must be None'
1060        assert yllcorner is None, 'yllcorner must be None'
1061        Geo_reference().write_NetCDF(outfile)
1062    else:
1063        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)
1064
1065
1066
1067    outfile.createDimension('number_of_points', len(points))
1068    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1069
1070    # variable definitions
1071    outfile.createVariable('points', Float, ('number_of_points',
1072                                             'number_of_dimensions'))
1073    outfile.createVariable(attribute_name, Float, ('number_of_points',))
1074
1075    # Get handles to the variables
1076    nc_points = outfile.variables['points']
1077    nc_attribute = outfile.variables[attribute_name]
1078
1079    #Store data
1080    nc_points[:, :] = points
1081    nc_attribute[:] = attribute
1082
1083    outfile.close()
1084
1085
def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False,):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    This is a thin (optionally caching) wrapper around _dem2pts.
    """

    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        #Cache on the .dem file so results are recomputed when it changes
        from caching import cache
        return cache(_dem2pts, basename_in, kwargs,
                     dependencies = [basename_in + '.dem'],
                     verbose = verbose)

    return _dem2pts(basename_in, **kwargs)
1127
1128
def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.

    Reads basename_in + '.dem', optionally clips the grid to the given
    easting/northing bounds, discards NODATA cells and writes the
    remaining cells as (x, y) points plus elevation to a NetCDF pts
    file (basename_out + '.pts', defaulting to basename_in + '.pts').
    Output coordinates are relative to (easting_min, northing_min).
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    #Grid metadata stored as length-1 arrays; [0] extracts the scalar
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values: no clipping means the full grid extent
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing: the pts file's origin is the clipping region's
    #lower left corner, not the original DEM's
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    #Elevation is stored flat in the file; view it as a (nrows, ncols) grid.
    #Row 0 is the northernmost row (standard ASCII-grid convention, see the
    #y computation below).
    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    #This scan determines:
    #  nn            - number of NODATA cells inside the clip region
    #  (i1_0, j1_0)  - first (row, col) index falling inside the region
    #  (thisi, thisj)- last (row, col) index falling inside the region
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    #Column range of the clipped region (index1 == j1_0 by construction)
    index1 = j1_0
    index2 = thisj

    # dimension definitions
    #NOTE(review): these two appear to be unused below - candidates for removal
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    #Number of grid cells in the clipped rectangle, and the number of
    #valid (non-NODATA) points that will actually be written
    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose and nn > 0:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1  #Number of columns in the clipped region
    #Store data row by row, skipping NODATA cells
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        #Count NODATA cells in this row to size the temporary arrays
        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] <> NODATA_value:
                #Store coordinates relative to the new lower left corner
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        #Only write the row if the expected number of valid points was
        #found; otherwise the temporary arrays contain trailing zeros
        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()
1308
1309
1310
1311def _read_hecras_cross_sections(lines):
1312    """Return block of surface lines for each cross section
1313    Starts with SURFACE LINE,
1314    Ends with END CROSS-SECTION
1315    """
1316
1317    points = []
1318
1319    reading_surface = False
1320    for i, line in enumerate(lines):
1321
1322        if len(line.strip()) == 0:    #Ignore blanks
1323            continue
1324
1325        if lines[i].strip().startswith('SURFACE LINE'):
1326            reading_surface = True
1327            continue
1328
1329        if lines[i].strip().startswith('END') and reading_surface:
1330            yield points
1331            reading_surface = False
1332            points = []
1333
1334        if reading_surface:
1335            fields = line.strip().split(',')
1336            easting = float(fields[0])
1337            northing = float(fields[1])
1338            elevation = float(fields[2])
1339            points.append([easting, northing, elevation])
1340
1341
1342
1343
1344def hecras_cross_sections2pts(basename_in,
1345                              basename_out=None,
1346                              verbose=False):
1347    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1348
1349    Example:
1350
1351
1352# RAS export file created on Mon 15Aug2005 11:42
1353# by HEC-RAS Version 3.1.1
1354
1355BEGIN HEADER:
1356  UNITS: METRIC
1357  DTM TYPE: TIN
1358  DTM: v:\1\cit\perth_topo\river_tin
1359  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1360  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1361  MAP PROJECTION: UTM
1362  PROJECTION ZONE: 50
1363  DATUM: AGD66
1364  VERTICAL DATUM:
1365  NUMBER OF REACHES:  19
1366  NUMBER OF CROSS-SECTIONS:  14206
1367END HEADER:
1368
1369
1370Only the SURFACE LINE data of the following form will be utilised
1371
1372  CROSS-SECTION:
1373    STREAM ID:Southern-Wungong
1374    REACH ID:Southern-Wungong
1375    STATION:19040.*
1376    CUT LINE:
1377      405548.671603161 , 6438142.7594925
1378      405734.536092045 , 6438326.10404912
1379      405745.130459356 , 6438331.48627354
1380      405813.89633823 , 6438368.6272789
1381    SURFACE LINE:
1382     405548.67,   6438142.76,   35.37
1383     405552.24,   6438146.28,   35.41
1384     405554.78,   6438148.78,   35.44
1385     405555.80,   6438149.79,   35.44
1386     405559.37,   6438153.31,   35.45
1387     405560.88,   6438154.81,   35.44
1388     405562.93,   6438156.83,   35.42
1389     405566.50,   6438160.35,   35.38
1390     405566.99,   6438160.83,   35.37
1391     ...
1392   END CROSS-SECTION
1393
1394    Convert to NetCDF pts format which is
1395
1396    points:  (Nx2) Float array
1397    elevation: N Float array
1398    """
1399
1400    #FIXME: Can this be written feasibly using write_pts?
1401
1402    import os
1403    from Scientific.IO.NetCDF import NetCDFFile
1404    from Numeric import Float, zeros, reshape
1405
1406    root = basename_in
1407
1408    #Get ASCII file
1409    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1410
1411    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1412
1413    lines = infile.readlines()
1414    infile.close()
1415
1416    if verbose: print 'Converting to pts format'
1417
1418    i = 0
1419    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1420        i += 1
1421
1422    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1423    i += 1
1424
1425    assert lines[i].strip().upper().startswith('UNITS:')
1426    units = lines[i].strip().split()[1]
1427    i += 1
1428
1429    assert lines[i].strip().upper().startswith('DTM TYPE:')
1430    i += 1
1431
1432    assert lines[i].strip().upper().startswith('DTM:')
1433    i += 1
1434
1435    assert lines[i].strip().upper().startswith('STREAM')
1436    i += 1
1437
1438    assert lines[i].strip().upper().startswith('CROSS')
1439    i += 1
1440
1441    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1442    projection = lines[i].strip().split(':')[1]
1443    i += 1
1444
1445    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1446    zone = int(lines[i].strip().split(':')[1])
1447    i += 1
1448
1449    assert lines[i].strip().upper().startswith('DATUM:')
1450    datum = lines[i].strip().split(':')[1]
1451    i += 1
1452
1453    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1454    i += 1
1455
1456    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1457    i += 1
1458
1459    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1460    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1461    i += 1
1462
1463
1464    #Now read all points
1465    points = []
1466    elevation = []
1467    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1468        for k, entry in enumerate(entries):
1469            points.append(entry[:2])
1470            elevation.append(entry[2])
1471
1472
1473    msg = 'Actual #number_of_cross_sections == %d, Reported as %d'\
1474          %(j+1, number_of_cross_sections)
1475    assert j+1 == number_of_cross_sections, msg
1476
1477    #Get output file
1478    if basename_out == None:
1479        ptsname = root + '.pts'
1480    else:
1481        ptsname = basename_out + '.pts'
1482
1483    #FIXME (DSG-ON): use loadASCII export_points_file
1484    if verbose: print 'Store to NetCDF file %s' %ptsname
1485    # NetCDF file definition
1486    outfile = NetCDFFile(ptsname, 'w')
1487
1488    #Create new file
1489    outfile.institution = 'Geoscience Australia'
1490    outfile.description = 'NetCDF pts format for compact and portable ' +\
1491                          'storage of spatial point data derived from HEC-RAS'
1492
1493    #Georeferencing
1494    outfile.zone = zone
1495    outfile.xllcorner = 0.0
1496    outfile.yllcorner = 0.0
1497    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1498    outfile.false_northing = 1000000
1499
1500    outfile.projection = projection
1501    outfile.datum = datum
1502    outfile.units = units
1503
1504
1505    outfile.createDimension('number_of_points', len(points))
1506    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1507
1508    # variable definitions
1509    outfile.createVariable('points', Float, ('number_of_points',
1510                                             'number_of_dimensions'))
1511    outfile.createVariable('elevation', Float, ('number_of_points',))
1512
1513    # Get handles to the variables
1514    outfile.variables['points'][:] = points
1515    outfile.variables['elevation'][:] = elevation
1516
1517    outfile.close()
1518
1519
1520
1521def sww2dem(basename_in, basename_out = None,
1522            quantity = None,
1523            timestep = None,
1524            reduction = None,
1525            cellsize = 10,
1526            NODATA_value = -9999,
1527            easting_min = None,
1528            easting_max = None,
1529            northing_min = None,
1530            northing_max = None,
1531            expand_search = False, #To avoid intractable situations (This will be fixed when least_squares gets redesigned)
1532            verbose = False,
1533            origin = None,
1534            datum = 'WGS84',
1535        format = 'ers'):
1536
1537    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1538
1539    Example (ASC):
1540
1541    ncols         3121
1542    nrows         1800
1543    xllcorner     722000
1544    yllcorner     5893000
1545    cellsize      25
1546    NODATA_value  -9999
1547    138.3698 137.4194 136.5062 135.5558 ..........
1548
1549    Also write accompanying file with same basename_in but extension .prj
1550    used to fix the UTM zone, datum, false northings and eastings.
1551
1552    The prj format is assumed to be as
1553
1554    Projection    UTM
1555    Zone          56
1556    Datum         WGS84
1557    Zunits        NO
1558    Units         METERS
1559    Spheroid      WGS84
1560    Xshift        0.0000000000
1561    Yshift        10000000.0000000000
1562    Parameters
1563
1564
1565    The parameter quantity must be the name of an existing quantity or
1566    an expression involving existing quantities. The default is
1567    'elevation'.
1568
1569    if timestep (an index) is given, output quantity at that timestep
1570
1571    if reduction is given use that to reduce quantity over all timesteps.
1572
1573    datum
1574
1575    format can be either 'asc' or 'ers'
1576    """
1577
1578    import sys
1579    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1580    from Numeric import array2string
1581
1582    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1583    from util import apply_expression_to_dictionary
1584
1585    msg = 'Format must be either asc or ers'
1586    assert format.lower() in ['asc', 'ers'], msg
1587
1588
1589    false_easting = 500000
1590    false_northing = 10000000
1591
1592    if quantity is None:
1593        quantity = 'elevation'
1594
1595    if reduction is None:
1596        reduction = max
1597
1598    if basename_out is None:
1599        basename_out = basename_in + '_%s' %quantity
1600
1601    swwfile = basename_in + '.sww'
1602    demfile = basename_out + '.' + format
1603    # Note the use of a .ers extension is optional (write_ermapper_grid will
1604    # deal with either option
1605
1606    #Read sww file
1607    if verbose: print 'Reading from %s' %swwfile
1608    from Scientific.IO.NetCDF import NetCDFFile
1609    fid = NetCDFFile(swwfile)
1610
1611    #Get extent and reference
1612    x = fid.variables['x'][:]
1613    y = fid.variables['y'][:]
1614    volumes = fid.variables['volumes'][:]
1615
1616    number_of_timesteps = fid.dimensions['number_of_timesteps']
1617    number_of_points = fid.dimensions['number_of_points']
1618    if origin is None:
1619
1620        #Get geo_reference
1621        #sww files don't have to have a geo_ref
1622        try:
1623            geo_reference = Geo_reference(NetCDFObject=fid)
1624        except AttributeError, e:
1625            geo_reference = Geo_reference() #Default georef object
1626
1627        xllcorner = geo_reference.get_xllcorner()
1628        yllcorner = geo_reference.get_yllcorner()
1629        zone = geo_reference.get_zone()
1630    else:
1631        zone = origin[0]
1632        xllcorner = origin[1]
1633        yllcorner = origin[2]
1634
1635
1636
1637    #FIXME: Refactor using code from file_function.statistics
1638    #Something like print swwstats(swwname)
1639    if verbose:
1640        x = fid.variables['x'][:]
1641        y = fid.variables['y'][:]
1642        times = fid.variables['time'][:]
1643        print '------------------------------------------------'
1644        print 'Statistics of SWW file:'
1645        print '  Name: %s' %swwfile
1646        print '  Reference:'
1647        print '    Lower left corner: [%f, %f]'\
1648              %(xllcorner, yllcorner)
1649        print '    Start time: %f' %fid.starttime[0]
1650        print '  Extent:'
1651        print '    x [m] in [%f, %f], len(x) == %d'\
1652              %(min(x.flat), max(x.flat), len(x.flat))
1653        print '    y [m] in [%f, %f], len(y) == %d'\
1654              %(min(y.flat), max(y.flat), len(y.flat))
1655        print '    t [s] in [%f, %f], len(t) == %d'\
1656              %(min(times), max(times), len(times))
1657        print '  Quantities [SI units]:'
1658        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
1659            q = fid.variables[name][:].flat
1660            print '    %s in [%f, %f]' %(name, min(q), max(q))
1661
1662
1663
1664
1665
1666    #Get quantity and reduce if applicable
1667    if verbose: print 'Processing quantity %s' %quantity
1668
1669    #Turn NetCDF objects into Numeric arrays
1670    quantity_dict = {}
1671    for name in fid.variables.keys():
1672        quantity_dict[name] = fid.variables[name][:]
1673
1674
1675    q = apply_expression_to_dictionary(quantity, quantity_dict)
1676
1677
1678
1679    if len(q.shape) == 2:
1680        #q has a time component and needs to be reduced along
1681        #the temporal dimension
1682        if verbose: print 'Reducing quantity %s' %quantity
1683        q_reduced = zeros( number_of_points, Float )
1684
1685        for k in range(number_of_points):
1686            q_reduced[k] = reduction( q[:,k] )
1687
1688        q = q_reduced
1689
1690    #Post condition: Now q has dimension: number_of_points
1691    assert len(q.shape) == 1
1692    assert q.shape[0] == number_of_points
1693
1694
1695    if verbose:
1696        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1697
1698
1699    #Create grid and update xll/yll corner and x,y
1700
1701    #Relative extent
1702    if easting_min is None:
1703        xmin = min(x)
1704    else:
1705        xmin = easting_min - xllcorner
1706
1707    if easting_max is None:
1708        xmax = max(x)
1709    else:
1710        xmax = easting_max - xllcorner
1711
1712    if northing_min is None:
1713        ymin = min(y)
1714    else:
1715        ymin = northing_min - yllcorner
1716
1717    if northing_max is None:
1718        ymax = max(y)
1719    else:
1720        ymax = northing_max - yllcorner
1721
1722
1723
1724    if verbose: print 'Creating grid'
1725    ncols = int((xmax-xmin)/cellsize)+1
1726    nrows = int((ymax-ymin)/cellsize)+1
1727
1728
1729    #New absolute reference and coordinates
1730    newxllcorner = xmin+xllcorner
1731    newyllcorner = ymin+yllcorner
1732
1733    x = x+xllcorner-newxllcorner
1734    y = y+yllcorner-newyllcorner
1735
1736    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1737    assert len(vertex_points.shape) == 2
1738
1739
1740
1741    grid_points = zeros ( (ncols*nrows, 2), Float )
1742
1743
1744    for i in xrange(nrows):
1745        if format.lower() == 'asc':
1746        yg = i*cellsize
1747    else:
1748        #this will flip the order of the y values for ers
1749            yg = (nrows-i)*cellsize
1750
1751        for j in xrange(ncols):
1752            xg = j*cellsize
1753            k = i*ncols + j
1754
1755            grid_points[k,0] = xg
1756            grid_points[k,1] = yg
1757
1758    #Interpolate
1759    from least_squares import Interpolation
1760
1761
1762    #FIXME: This should be done with precrop = True (?), otherwise it'll
1763    #take forever. With expand_search  set to False, some grid points might
1764    #miss out.... This will be addressed though Duncan's refactoring of least_squares
1765
1766    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
1767                           precrop = False,
1768                           expand_search = expand_search,
1769                           verbose = verbose)
1770
1771
1772
1773    #Interpolate using quantity values
1774    if verbose: print 'Interpolating'
1775    grid_values = interp.interpolate(q).flat
1776
1777
1778    if verbose:
1779        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
1780                                                      max(grid_values))
1781
1782    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
1783    P = interp.mesh.get_boundary_polygon()
1784    outside_indices = outside_polygon(grid_points, P, closed=True)
1785
1786    for i in outside_indices:
1787        grid_values[i] = NODATA_value
1788
1789
1790
1791
1792    if format.lower() == 'ers':
1793        # setup ERS header information
1794        grid_values = reshape(grid_values,(nrows, ncols))
1795        header = {}
1796        header['datum'] = '"' + datum + '"'
1797        # FIXME The use of hardwired UTM and zone number needs to be made optional
1798        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
1799        header['projection'] = '"UTM-' + str(zone) + '"'
1800        header['coordinatetype'] = 'EN'
1801        if header['coordinatetype'] == 'LL':
1802            header['longitude'] = str(newxllcorner)
1803            header['latitude'] = str(newyllcorner)
1804        elif header['coordinatetype'] == 'EN':
1805            header['eastings'] = str(newxllcorner)
1806            header['northings'] = str(newyllcorner)
1807        header['nullcellvalue'] = str(NODATA_value)
1808        header['xdimension'] = str(cellsize)
1809        header['ydimension'] = str(cellsize)
1810        header['value'] = '"' + quantity + '"'
1811        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
1812
1813
1814        #Write
1815        if verbose: print 'Writing %s' %demfile
1816        import ermapper_grids
1817        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
1818
1819        fid.close()
1820    else:
1821        #Write to Ascii format
1822
1823        #Write prj file
1824        prjfile = basename_out + '.prj'
1825
1826        if verbose: print 'Writing %s' %prjfile
1827        prjid = open(prjfile, 'w')
1828        prjid.write('Projection    %s\n' %'UTM')
1829        prjid.write('Zone          %d\n' %zone)
1830        prjid.write('Datum         %s\n' %datum)
1831        prjid.write('Zunits        NO\n')
1832        prjid.write('Units         METERS\n')
1833        prjid.write('Spheroid      %s\n' %datum)
1834        prjid.write('Xshift        %d\n' %false_easting)
1835        prjid.write('Yshift        %d\n' %false_northing)
1836        prjid.write('Parameters\n')
1837        prjid.close()
1838
1839
1840
1841        if verbose: print 'Writing %s' %demfile
1842
1843        ascid = open(demfile, 'w')
1844
1845        ascid.write('ncols         %d\n' %ncols)
1846        ascid.write('nrows         %d\n' %nrows)
1847        ascid.write('xllcorner     %d\n' %newxllcorner)
1848        ascid.write('yllcorner     %d\n' %newyllcorner)
1849        ascid.write('cellsize      %f\n' %cellsize)
1850        ascid.write('NODATA_value  %d\n' %NODATA_value)
1851
1852
1853        #Get bounding polygon from mesh
1854        #P = interp.mesh.get_boundary_polygon()
1855        #inside_indices = inside_polygon(grid_points, P)
1856
1857        for i in range(nrows):
1858            if verbose and i%((nrows+10)/10)==0:
1859                print 'Doing row %d of %d' %(i, nrows)
1860
1861            base_index = (nrows-i-1)*ncols
1862
1863            slice = grid_values[base_index:base_index+ncols]
1864            s = array2string(slice, max_line_width=sys.maxint)
1865            ascid.write(s[1:-1] + '\n')
1866
1867
1868            #print
1869            #for j in range(ncols):
1870            #    index = base_index+j#
1871            #    print grid_values[index],
1872            #    ascid.write('%f ' %grid_values[index])
1873            #ascid.write('\n')
1874
1875        #Close
1876        ascid.close()
1877        fid.close()
1878
1879#Backwards compatibility
1880def sww2asc(basename_in, basename_out = None,
1881            quantity = None,
1882            timestep = None,
1883            reduction = None,
1884            cellsize = 10,
1885            verbose = False,
1886            origin = None):
1887    print 'sww2asc will soon be obsoleted - please use sww2dem'
1888    sww2dem(basename_in,
1889            basename_out = basename_out,
1890            quantity = quantity,
1891            timestep = timestep,
1892            reduction = reduction,
1893            cellsize = cellsize,
1894            verbose = verbose,
1895            origin = origin,
1896        datum = 'WGS84',
1897        format = 'asc')
1898
1899def sww2ers(basename_in, basename_out = None,
1900            quantity = None,
1901            timestep = None,
1902            reduction = None,
1903            cellsize = 10,
1904            verbose = False,
1905            origin = None,
1906            datum = 'WGS84'):
1907    print 'sww2ers will soon be obsoleted - please use sww2dem'
1908    sww2dem(basename_in,
1909            basename_out = basename_out,
1910            quantity = quantity,
1911            timestep = timestep,
1912            reduction = reduction,
1913            cellsize = cellsize,
1914            verbose = verbose,
1915            origin = origin,
1916        datum = datum,
1917        format = 'ers')
1918################################# END COMPATIBILITY ##############
1919
1920
1921
def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
                                  use_cache = False,
                                  verbose = False):
    """Read Digitial Elevation model from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert basename_in + '.asc' to NetCDF format (.dem)
    mimicking the ASCII format closely.


    An accompanying file with same basename_in but extension .prj must exist
    and is used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters

    This is a thin (optionally caching) wrapper around
    _convert_dem_from_ascii2netcdf.
    """

    kwargs = {'basename_out': basename_out, 'verbose': verbose}

    if use_cache is True:
        #Cache on the .asc file so results are recomputed when it changes
        from caching import cache
        return cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
                     dependencies = [basename_in + '.asc'],
                     verbose = verbose)

    return _convert_dem_from_ascii2netcdf(basename_in, **kwargs)
1971
1972
1973
1974
1975
1976
def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
                                  verbose = False):
    """Read Digitial Elevation model from the following ASCII format (.asc)

    Internal function. See public function convert_dem_from_ascii2netcdf for details.

    Reads basename_in + '.prj' for georeferencing metadata and
    basename_in + '.asc' for the gridded elevation data, then writes
    both to a NetCDF file with extension '.dem'.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, array

    #root, ext = os.path.splitext(basename_in)
    root = basename_in

    ###########################################
    # Read Meta data
    # The .prj file is parsed line by line; the fields are assumed to
    # appear in the exact order shown in the docstring of the public
    # function (Projection, Zone, Datum, Zunits, Units, Spheroid,
    # Xshift, Yshift). Each assert checks that the expected keyword is
    # present on its expected line.
    if verbose: print 'Reading METADATA from %s' %root + '.prj'
    metadatafile = open(root + '.prj')
    metalines = metadatafile.readlines()
    metadatafile.close()

    L = metalines[0].strip().split()
    assert L[0].strip().lower() == 'projection'
    projection = L[1].strip()                   #TEXT

    L = metalines[1].strip().split()
    assert L[0].strip().lower() == 'zone'
    zone = int(L[1].strip())

    L = metalines[2].strip().split()
    assert L[0].strip().lower() == 'datum'
    datum = L[1].strip()                        #TEXT

    L = metalines[3].strip().split()
    assert L[0].strip().lower() == 'zunits'     #IGNORE
    zunits = L[1].strip()                       #TEXT

    L = metalines[4].strip().split()
    assert L[0].strip().lower() == 'units'
    units = L[1].strip()                        #TEXT

    L = metalines[5].strip().split()
    assert L[0].strip().lower() == 'spheroid'   #IGNORE
    spheroid = L[1].strip()                     #TEXT

    L = metalines[6].strip().split()
    assert L[0].strip().lower() == 'xshift'
    false_easting = float(L[1].strip())

    L = metalines[7].strip().split()
    assert L[0].strip().lower() == 'yshift'
    false_northing = float(L[1].strip())

    #print false_easting, false_northing, zone, datum


    ###########################################
    #Read DEM data
    # Header is the first six lines (ncols, nrows, xllcorner, yllcorner,
    # cellsize, NODATA_value); the remaining nrows lines each hold one
    # row of ncols elevation values.

    datafile = open(basename_in + '.asc')

    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), ' lines'

    ncols = int(lines[0].split()[1].strip())
    nrows = int(lines[1].split()[1].strip())
    xllcorner = float(lines[2].split()[1].strip())
    yllcorner = float(lines[3].split()[1].strip())
    cellsize = float(lines[4].split()[1].strip())
    NODATA_value = int(lines[5].split()[1].strip())

    # Sanity check: 6 header lines plus exactly nrows data rows
    assert len(lines) == nrows + 6


    ##########################################


    if basename_out == None:
        netcdfname = root + '.dem'
    else:
        netcdfname = basename_out + '.dem'

    if verbose: print 'Store to NetCDF file %s' %netcdfname
    # NetCDF file definition
    fid = NetCDFFile(netcdfname, 'w')

    #Create new file
    fid.institution = 'Geoscience Australia'
    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
                      'of spatial point data'

    # Grid metadata stored as global NetCDF attributes
    fid.ncols = ncols
    fid.nrows = nrows
    fid.xllcorner = xllcorner
    fid.yllcorner = yllcorner
    fid.cellsize = cellsize
    fid.NODATA_value = NODATA_value

    # Georeferencing metadata from the .prj file
    fid.zone = zone
    fid.false_easting = false_easting
    fid.false_northing = false_northing
    fid.projection = projection
    fid.datum = datum
    fid.units = units


    # dimension definitions
    fid.createDimension('number_of_rows', nrows)
    fid.createDimension('number_of_columns', ncols)

    # variable definitions
    fid.createVariable('elevation', Float, ('number_of_rows',
                                            'number_of_columns'))

    # Get handles to the variables
    elevation = fid.variables['elevation']

    #Store data
    # Write one row at a time; progress is reported roughly every 10%
    # (integer division (n+10)/10 is intentional here - Python 2).
    n = len(lines[6:])
    for i, line in enumerate(lines[6:]):
        fields = line.split()
        if verbose and i%((n+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        elevation[i, :] = array([float(x) for x in fields])

    fid.close()
2107
2108
2109
2110
2111
2112def ferret2sww(basename_in, basename_out = None,
2113               verbose = False,
2114               minlat = None, maxlat = None,
2115               minlon = None, maxlon = None,
2116               mint = None, maxt = None, mean_stage = 0,
2117               origin = None, zscale = 1,
2118               fail_on_NaN = True,
2119               NaN_filler = 0,
2120               elevation = None,
2121               inverted_bathymetry = False
2122               ): #FIXME: Bathymetry should be obtained
2123                                  #from MOST somehow.
2124                                  #Alternatively from elsewhere
2125                                  #or, as a last resort,
2126                                  #specified here.
2127                                  #The value of -100 will work
2128                                  #for the Wollongong tsunami
2129                                  #scenario but is very hacky
2130    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2131    sww format native to pyvolution.
2132
2133    Specify only basename_in and read files of the form
2134    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2135    relative height, x-velocity and y-velocity, respectively.
2136
2137    Also convert latitude and longitude to UTM. All coordinates are
2138    assumed to be given in the GDA94 datum.
2139
2140    min's and max's: If omitted - full extend is used.
2141    To include a value min may equal it, while max must exceed it.
2142    Lat and lon are assuemd to be in decimal degrees
2143
2144    origin is a 3-tuple with geo referenced
2145    UTM coordinates (zone, easting, northing)
2146
2147    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2148    which means that longitude is the fastest
2149    varying dimension (row major order, so to speak)
2150
2151    ferret2sww uses grid points as vertices in a triangular grid
2152    counting vertices from lower left corner upwards, then right
2153    """
2154
2155    import os
2156    from Scientific.IO.NetCDF import NetCDFFile
2157    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2158    from Numeric import allclose, around
2159
2160    precision = Float
2161
2162    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2163
2164    if minlat != None:
2165        assert -90 < minlat < 90 , msg
2166    if maxlat != None:
2167        assert -90 < maxlat < 90 , msg
2168    if minlon != None:
2169        assert -180 < minlon < 180 , msg
2170    if maxlon != None:
2171        assert -180 < maxlon < 180 , msg
2172
2173
2174    #Get NetCDF data
2175    if verbose: print 'Reading files %s_*.nc' %basename_in
2176    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2177    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2178    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2179    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2180
2181    if basename_out is None:
2182        swwname = basename_in + '.sww'
2183    else:
2184        swwname = basename_out + '.sww'
2185
2186#    Get dimensions of file_h
2187    for dimension in file_h.dimensions.keys():
2188        if dimension[:3] == 'LON':
2189            dim_h_longitude = dimension
2190        if dimension[:3] == 'LAT':
2191            dim_h_latitude = dimension
2192        if dimension[:4] == 'TIME':
2193            dim_h_time = dimension
2194
2195#    print 'long:', dim_h_longitude
2196#    print 'lats:', dim_h_latitude
2197#    print 'times:', dim_h_time
2198
2199    times = file_h.variables[dim_h_time]
2200    latitudes = file_h.variables[dim_h_latitude]
2201    longitudes = file_h.variables[dim_h_longitude]
2202   
2203# get dimensions for file_e
2204    for dimension in file_e.dimensions.keys():
2205        if dimension[:3] == 'LON':
2206            dim_e_longitude = dimension
2207        if dimension[:3] == 'LAT':
2208            dim_e_latitude = dimension
2209        if dimension[:4] == 'TIME':
2210            dim_e_time = dimension
2211
2212# get dimensions for file_u
2213    for dimension in file_u.dimensions.keys():
2214        if dimension[:3] == 'LON':
2215            dim_u_longitude = dimension
2216        if dimension[:3] == 'LAT':
2217            dim_u_latitude = dimension
2218        if dimension[:4] == 'TIME':
2219            dim_u_time = dimension
2220           
2221# get dimensions for file_v
2222    for dimension in file_v.dimensions.keys():
2223        if dimension[:3] == 'LON':
2224            dim_v_longitude = dimension
2225        if dimension[:3] == 'LAT':
2226            dim_v_latitude = dimension
2227        if dimension[:4] == 'TIME':
2228            dim_v_time = dimension
2229
2230
2231    #Precision used by most for lat/lon is 4 or 5 decimals
2232    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2233    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2234
2235    #Check that files are compatible
2236    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2237    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2238    assert allclose(latitudes, e_lat)
2239
2240    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2241    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2242    assert allclose(longitudes, e_lon)
2243
2244    if mint == None:
2245        jmin = 0
2246    else:
2247        jmin = searchsorted(times, mint)
2248
2249    if maxt == None:
2250        jmax=len(times)
2251    else:
2252        jmax = searchsorted(times, maxt)
2253
2254    if minlat == None:
2255        kmin=0
2256    else:
2257        kmin = searchsorted(latitudes, minlat)
2258
2259    if maxlat == None:
2260        kmax = len(latitudes)
2261    else:
2262        kmax = searchsorted(latitudes, maxlat)
2263
2264    if minlon == None:
2265        lmin=0
2266    else:
2267        lmin = searchsorted(longitudes, minlon)
2268
2269    if maxlon == None:
2270        lmax = len(longitudes)
2271    else:
2272        lmax = searchsorted(longitudes, maxlon)
2273
2274#    print' j', jmin, jmax
2275    times = times[jmin:jmax]
2276    latitudes = latitudes[kmin:kmax]
2277    longitudes = longitudes[lmin:lmax]
2278
2279
2280    if verbose: print 'cropping'
2281    zname = 'ELEVATION'
2282
2283    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2284    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2285    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2286    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2287
2288    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2289    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2290    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2291    #        from Numeric import asarray
2292    #        elevations=elevations.tolist()
2293    #        elevations.reverse()
2294    #        elevations=asarray(elevations)
2295    #    else:
2296    #        from Numeric import asarray
2297    #        elevations=elevations.tolist()
2298    #        elevations.reverse()
2299    #        elevations=asarray(elevations)
2300    #        'print hmmm'
2301
2302
2303
2304    #Get missing values
2305    nan_ha = file_h.variables['HA'].missing_value[0]
2306    nan_ua = file_u.variables['UA'].missing_value[0]
2307    nan_va = file_v.variables['VA'].missing_value[0]
2308    if hasattr(file_e.variables[zname],'missing_value'):
2309        nan_e  = file_e.variables[zname].missing_value[0]
2310    else:
2311        nan_e = None
2312
2313    #Cleanup
2314    from Numeric import sometrue
2315
2316    missing = (amplitudes == nan_ha)
2317    if sometrue (missing):
2318        if fail_on_NaN:
2319            msg = 'NetCDFFile %s contains missing values'\
2320                  %(basename_in+'_ha.nc')
2321            raise DataMissingValuesError, msg
2322        else:
2323            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2324
2325    missing = (uspeed == nan_ua)
2326    if sometrue (missing):
2327        if fail_on_NaN:
2328            msg = 'NetCDFFile %s contains missing values'\
2329                  %(basename_in+'_ua.nc')
2330            raise DataMissingValuesError, msg
2331        else:
2332            uspeed = uspeed*(missing==0) + missing*NaN_filler
2333
2334    missing = (vspeed == nan_va)
2335    if sometrue (missing):
2336        if fail_on_NaN:
2337            msg = 'NetCDFFile %s contains missing values'\
2338                  %(basename_in+'_va.nc')
2339            raise DataMissingValuesError, msg
2340        else:
2341            vspeed = vspeed*(missing==0) + missing*NaN_filler
2342
2343
2344    missing = (elevations == nan_e)
2345    if sometrue (missing):
2346        if fail_on_NaN:
2347            msg = 'NetCDFFile %s contains missing values'\
2348                  %(basename_in+'_e.nc')
2349            raise DataMissingValuesError, msg
2350        else:
2351            elevations = elevations*(missing==0) + missing*NaN_filler
2352
2353    #######
2354
2355
2356
2357    number_of_times = times.shape[0]
2358    number_of_latitudes = latitudes.shape[0]
2359    number_of_longitudes = longitudes.shape[0]
2360
2361    assert amplitudes.shape[0] == number_of_times
2362    assert amplitudes.shape[1] == number_of_latitudes
2363    assert amplitudes.shape[2] == number_of_longitudes
2364
2365    if verbose:
2366        print '------------------------------------------------'
2367        print 'Statistics:'
2368        print '  Extent (lat/lon):'
2369        print '    lat in [%f, %f], len(lat) == %d'\
2370              %(min(latitudes.flat), max(latitudes.flat),
2371                len(latitudes.flat))
2372        print '    lon in [%f, %f], len(lon) == %d'\
2373              %(min(longitudes.flat), max(longitudes.flat),
2374                len(longitudes.flat))
2375        print '    t in [%f, %f], len(t) == %d'\
2376              %(min(times.flat), max(times.flat), len(times.flat))
2377
2378        q = amplitudes.flat
2379        name = 'Amplitudes (ha) [cm]'
2380        print %s in [%f, %f]' %(name, min(q), max(q))
2381
2382        q = uspeed.flat
2383        name = 'Speeds (ua) [cm/s]'
2384        print %s in [%f, %f]' %(name, min(q), max(q))
2385
2386        q = vspeed.flat
2387        name = 'Speeds (va) [cm/s]'
2388        print %s in [%f, %f]' %(name, min(q), max(q))
2389
2390        q = elevations.flat
2391        name = 'Elevations (e) [m]'
2392        print %s in [%f, %f]' %(name, min(q), max(q))
2393
2394
2395    #print number_of_latitudes, number_of_longitudes
2396    number_of_points = number_of_latitudes*number_of_longitudes
2397    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2398
2399
2400    file_h.close()
2401    file_u.close()
2402    file_v.close()
2403    file_e.close()
2404
2405
2406    # NetCDF file definition
2407    outfile = NetCDFFile(swwname, 'w')
2408
2409    #Create new file
2410    outfile.institution = 'Geoscience Australia'
2411    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2412                          %(basename_in + '_ha.nc',
2413                            basename_in + '_ua.nc',
2414                            basename_in + '_va.nc',
2415                            basename_in + '_e.nc')
2416
2417
2418    #For sww compatibility
2419    outfile.smoothing = 'Yes'
2420    outfile.order = 1
2421
2422    #Start time in seconds since the epoch (midnight 1/1/1970)
2423    outfile.starttime = starttime = times[0]
2424    times = times - starttime  #Store relative times
2425
2426    # dimension definitions
2427    outfile.createDimension('number_of_volumes', number_of_volumes)
2428
2429    outfile.createDimension('number_of_vertices', 3)
2430    outfile.createDimension('number_of_points', number_of_points)
2431
2432
2433    #outfile.createDimension('number_of_timesteps', len(times))
2434    outfile.createDimension('number_of_timesteps', len(times))
2435
2436    # variable definitions
2437    outfile.createVariable('x', precision, ('number_of_points',))
2438    outfile.createVariable('y', precision, ('number_of_points',))
2439    outfile.createVariable('elevation', precision, ('number_of_points',))
2440
2441    #FIXME: Backwards compatibility
2442    outfile.createVariable('z', precision, ('number_of_points',))
2443    #################################
2444
2445    outfile.createVariable('volumes', Int, ('number_of_volumes',
2446                                            'number_of_vertices'))
2447
2448    outfile.createVariable('time', precision,
2449                           ('number_of_timesteps',))
2450
2451    outfile.createVariable('stage', precision,
2452                           ('number_of_timesteps',
2453                            'number_of_points'))
2454
2455    outfile.createVariable('xmomentum', precision,
2456                           ('number_of_timesteps',
2457                            'number_of_points'))
2458
2459    outfile.createVariable('ymomentum', precision,
2460                           ('number_of_timesteps',
2461                            'number_of_points'))
2462
2463
2464    #Store
2465    from coordinate_transforms.redfearn import redfearn
2466    x = zeros(number_of_points, Float)  #Easting
2467    y = zeros(number_of_points, Float)  #Northing
2468
2469
2470    if verbose: print 'Making triangular grid'
2471    #Check zone boundaries
2472    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2473
2474    vertices = {}
2475    i = 0
2476    for k, lat in enumerate(latitudes):       #Y direction
2477        for l, lon in enumerate(longitudes):  #X direction
2478
2479            vertices[l,k] = i
2480
2481            zone, easting, northing = redfearn(lat,lon)
2482
2483            msg = 'Zone boundary crossed at longitude =', lon
2484            #assert zone == refzone, msg
2485            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2486            x[i] = easting
2487            y[i] = northing
2488            i += 1
2489
2490
2491    #Construct 2 triangles per 'rectangular' element
2492    volumes = []
2493    for l in range(number_of_longitudes-1):    #X direction
2494        for k in range(number_of_latitudes-1): #Y direction
2495            v1 = vertices[l,k+1]
2496            v2 = vertices[l,k]
2497            v3 = vertices[l+1,k+1]
2498            v4 = vertices[l+1,k]
2499
2500            volumes.append([v1,v2,v3]) #Upper element
2501            volumes.append([v4,v3,v2]) #Lower element
2502
2503    volumes = array(volumes)
2504
2505    if origin == None:
2506        zone = refzone
2507        xllcorner = min(x)
2508        yllcorner = min(y)
2509    else:
2510        zone = origin[0]
2511        xllcorner = origin[1]
2512        yllcorner = origin[2]
2513
2514
2515    outfile.xllcorner = xllcorner
2516    outfile.yllcorner = yllcorner
2517    outfile.zone = zone
2518
2519
2520    if elevation is not None:
2521        z = elevation
2522    else:
2523        if inverted_bathymetry:
2524            z = -1*elevations
2525        else:
2526            z = elevations
2527    #FIXME: z should be obtained from MOST and passed in here
2528
2529    from Numeric import resize
2530    z = resize(z,outfile.variables['z'][:].shape)
2531    outfile.variables['x'][:] = x - xllcorner
2532    outfile.variables['y'][:] = y - yllcorner
2533    outfile.variables['z'][:] = z             #FIXME HACK
2534    outfile.variables['elevation'][:] = z
2535    outfile.variables['time'][:] = times   #Store time relative
2536    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2537
2538
2539
2540    #Time stepping
2541    stage = outfile.variables['stage']
2542    xmomentum = outfile.variables['xmomentum']
2543    ymomentum = outfile.variables['ymomentum']
2544
2545    if verbose: print 'Converting quantities'
2546    n = len(times)
2547    for j in range(n):
2548        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2549        i = 0
2550        for k in range(number_of_latitudes):      #Y direction
2551            for l in range(number_of_longitudes): #X direction
2552                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2553                stage[j,i] = w
2554                h = w - z[i]
2555                xmomentum[j,i] = uspeed[j,k,l]/100*h
2556                ymomentum[j,i] = vspeed[j,k,l]/100*h
2557                i += 1
2558
2559    #outfile.close()
2560
2561    #FIXME: Refactor using code from file_function.statistics
2562    #Something like print swwstats(swwname)
2563    if verbose:
2564        x = outfile.variables['x'][:]
2565        y = outfile.variables['y'][:]
2566        times = outfile.variables['time'][:]
2567        print '------------------------------------------------'
2568        print 'Statistics of output file:'
2569        print '  Name: %s' %swwname
2570        print '  Reference:'
2571        print '    Lower left corner: [%f, %f]'\
2572              %(xllcorner, yllcorner)
2573        print '    Start time: %f' %starttime
2574        print '  Extent:'
2575        print '    x [m] in [%f, %f], len(x) == %d'\
2576              %(min(x.flat), max(x.flat), len(x.flat))
2577        print '    y [m] in [%f, %f], len(y) == %d'\
2578              %(min(y.flat), max(y.flat), len(y.flat))
2579        print '    t [s] in [%f, %f], len(t) == %d'\
2580              %(min(times), max(times), len(times))
2581        print '  Quantities [SI units]:'
2582        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2583            q = outfile.variables[name][:].flat
2584            print '    %s in [%f, %f]' %(name, min(q), max(q))
2585
2586
2587
2588    outfile.close()
2589
2590
2591
2592
2593
def timefile2netcdf(filename, quantity_names = None):
    """Template for converting typical text files with time series to
    NetCDF tms file.


    The file format is assumed to be either two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname with extenisons .txt and .sww

    quantity_names: Optional sequence of names for the value columns.
    Columns without a supplied name are called Attribute0, Attribute1, ...

    Raises DataTimeError if the first field cannot be parsed as a
    date-time in config.time_format.
    Times must be strictly increasing.
    """

    import time, calendar
    from Numeric import zeros, Float, alltrue
    from config import time_format
    from utilities.numerical_tools import ensure_numeric


    # Read the whole file once (previously it was opened and read twice)
    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    # Use the first line to establish start time and number of attributes
    fields = lines[0].split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg

    try:
        starttime = calendar.timegm(time.strptime(fields[0], time_format))
    except ValueError:
        msg = 'First field in file %s must be' %filename
        msg += ' date-time with format %s.\n' %time_format
        msg += 'I got %s instead.' %fields[0]
        raise DataTimeError(msg)


    #Split values
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg


    #Read times proper
    N = len(lines)
    d = len(q)

    T = zeros(N, Float)       #Time
    Q = zeros((N, d), Float)  #Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        realtime = calendar.timegm(time.strptime(fields[0], time_format))

        # Store times relative to the start time
        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonuosly ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg


    #Create NetCDF file
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(filename + '.tms', 'w')


    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'


    #Reference point
    #Start time in seconds since the epoch (midnight 1/1/1970)
    #FIXME: Use Georef
    fid.starttime = starttime


    # dimension definitions
    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', Float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError, KeyError):
            # quantity_names is None, too short or lacks this key -
            # fall back to a generic attribute name
            name = 'Attribute%d'%i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()
2715
2716
def extent_sww(file_name):
    """
    Read in an sww file.

    Input;
    file_name - the sww file

    Output;
    A list [xmin, xmax, ymin, ymax, stage_min, stage_max] giving the
    spatial extent of the mesh and the range of stage values over all
    timesteps.
    """

    from Scientific.IO.NetCDF import NetCDFFile

    # Open the sww file for reading and pull out the relevant variables
    fid = NetCDFFile(file_name, 'r')

    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stages = fid.variables['stage'][:]

    fid.close()

    # stage is 2D (time x points) so flatten before taking extrema
    return [min(xs), max(xs),
            min(ys), max(ys),
            min(stages.flat), max(stages.flat)]
2751
2752
def sww2domain(filename,boundary=None,t=None,\
               fail_if_NaN=True,NaN_filler=0\
               ,verbose = False,very_verbose = False):
    """
    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatable with the coordinates, and will
    give a different final boundary, or crash.

    Reads mesh, georeference and quantities from the sww file and builds
    a shallow_water Domain. Time-dependent quantities are interpolated
    to time t; time-independent quantities are used as stored.
    Raises DataDomainError if the Domain cannot be constructed and
    DataMissingValuesError if a quantity contains missing values and
    fail_if_NaN is True.
    """
    # Default missing_value marker, overridden per-quantity below if the
    # variable carries its own missing_value attribute.
    NaN=9.969209968386869e+036
    #initialise NaN.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
    time = fid.variables['time']       #Timesteps
    if t is None:
        t = time[-1]
    # (index, ratio) pair used to interpolate quantities to time t
    time_interp = get_time_interp(time,t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]             #x-coordinates of vertices
    y = fid.variables['y'][:]             #y-coordinates of vertices
    elevation = fid.variables['elevation']     #Elevation
    stage = fid.variables['stage']     #Water level
    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    coordinates=transpose(asarray([x.tolist(),y.tolist()]))

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    if verbose: print '    getting quantities'
    # Quantities with a time dimension are conserved quantities and get
    # interpolated to time t; the rest are time-independent.
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity]=\
                  interpolated_quantity(fid.variables[quantity][:],time_interp)
        else: other_quantities.append(quantity)

    # Coordinates and connectivity are not quantities
    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    #   From shallow_water.Domain:
    coordinates=coordinates.tolist()
    volumes=volumes.tolist()
    #FIXME:should this be in mesh?(peter row)
    # Smoothed (shared-vertex) files need duplicate coordinates weeded
    # out before the Domain can be built.
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)


    try:
        domain = Domain(coordinates, volumes, boundary)
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
        raise DataDomainError, msg

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    # Model time restarts at 0 relative to the requested snapshot
    domain.starttime=float(starttime)+float(t)
    domain.time=0.0

    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise DataMissingValuesError, msg
            else:
                # '<>' is the Python 2 inequality operator (same as '!=');
                # replace missing entries with NaN_filler
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            # Expand weeded (unique-vertex) data back to one row of
            # three vertex values per triangle
            X = resize(X,(len(X)/3,3))
        domain.set_quantity(quantity,X)
    #
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise DataMissingValuesError, msg
            else:
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            X = resize(X,(X.shape[0]/3,3))
        domain.set_quantity(quantity,X)

    fid.close()
    return domain
2901
def interpolated_quantity(saved_quantity,time_interp):
    """Interpolate a saved quantity in time.

    time_interp is an (index, ratio) pair as produced by
    get_time_interp.  When ratio > 0 the result is the linear blend of
    timesteps index and index+1; otherwise timestep index is returned
    unchanged.  Returns the vector of interpolated values.
    """
    index, ratio = time_interp
    q_lo = saved_quantity[index]
    if not ratio > 0:
        return q_lo
    q_hi = saved_quantity[index+1]
    return (1-ratio)*q_lo + ratio*q_hi
2913
def get_time_interp(time,t=None):
    """Find the (index, ratio) pair for interpolating 'time' at t.

    Borrowed from previous pyvolution code.  With t None the last
    stored timestep is selected, i.e. (index=-1, ratio=0.).

    time: sorted sequence of stored timestep times.
    t: model time to locate within 'time'.

    Raises DataTimeError if t lies outside [time[0], time[-1]].
    """
    if t is None:
        return (-1, 0.)

    tau = t
    # Note: the filename in this message was never wired up (original FIXME)
    msg = 'Time interval derived from file %s [%s:%s]'\
        %('FIXMEfilename', time[0], time[-1])
    msg += ' does not match model time: %s' %tau
    # raise Exc(msg) is equivalent to the old "raise Exc, msg" but is
    # valid in both Python 2 and Python 3
    if tau < time[0]: raise DataTimeError(msg)
    if tau > time[-1]: raise DataTimeError(msg)

    # Scan for the interval containing tau; the second loop backs up
    # when the first overshoots onto an exact match
    index = 0
    while tau > time[index]: index += 1
    while tau < time[index]: index -= 1

    if tau == time[index]:
        #Protect against case where tau == time[-1] (last time)
        # - also works in general when tau == time[i]
        ratio = 0
    else:
        #t is now between index and index+1
        ratio = (tau - time[index])/(time[index+1] - time[index])
    return (index,ratio)
2940
2941
def weed(coordinates,volumes,boundary = None):
    """Remove duplicate points from a mesh description.

    coordinates: list (or array) of [x, y] points, possibly containing
        duplicates.
    volumes: list (or array) of triangles as triples of point indices;
        remapped in place to the weeded point numbering.
    boundary: optional dict mapping (index0, index1) segments to labels;
        segment keys are rewritten to the weeded numbering.

    Returns (coordinates, volumes, boundary) with each distinct point
    appearing exactly once.
    """
    # Bug fix: the original tested "type(x) == 'array'", comparing a
    # type object against a string (always False), so Numeric arrays
    # were never converted.  Convert anything exposing tolist().
    if hasattr(coordinates, 'tolist'):
        coordinates = coordinates.tolist()
    if hasattr(volumes, 'tolist'):
        volumes = volumes.tolist()

    # point_dict: first index seen for each distinct point
    # same_point: every original index -> its point tuple, so duplicate
    # indices can be redirected to the surviving entry
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        same_point[i] = point
        if point not in point_dict:
            point_dict[point] = i

    # Rebuild the coordinate list with one entry per distinct point and
    # renumber point_dict to the new (weeded) indices
    coordinates = []
    i = 0
    for point in point_dict.keys():
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    # Remap triangle vertex indices (negative indices are sentinels and
    # are left alone)
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    if boundary is not None:
        new_boundary = {}
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond') (peter row)
            if (point0, point1) in new_boundary:
                new_boundary[(point0, point1)] = new_boundary[(point0, point1)]
            elif (point1, point0) in new_boundary:
                new_boundary[(point1, point0)] = new_boundary[(point1, point0)]
            else:
                new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates,volumes,boundary
2997
2998
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.

    basename_in: root of the input file name (basename_in + '.dem').
    stencil: 2D weight array; each output cell is the weighted sum
        sum(sum(window * stencil)) over a stencil-sized window of input
        cells, unless the window contains NODATA_value.
    cellsize_new: cell size of the decimated grid -- presumably an
        integer multiple of the input cellsize (the ratio is truncated
        with int()); TODO confirm.
    basename_out: root of the output file name; defaults to
        basename_in + '_' + repr(cellsize_new).
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata (attributes are length-1 arrays, hence the [0])
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out == None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    #NOTE: relies on Python 2 integer division throughout
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                           'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    #Shift the corner by the stencil half-width so output cells are
    #centred on the stencil windows
    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    #View the flat elevation variable as a nrows x ncols grid
    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data, one decimated row at a time
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev =  zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
3125
3126
3127
3128
3129def tsh2sww(filename, verbose=False): #test_tsh2sww
3130    """
3131    to check if a tsh/msh file 'looks' good.
3132    """
3133
3134    #FIXME Move to data_manager
3135    from shallow_water import Domain
3136    from pmesh2domain import pmesh_to_domain_instance
3137    import time, os
3138    from data_manager import get_dataobject
3139    from os import sep, path
3140    from utilities.numerical_tools import mean
3141
3142    if verbose == True:print 'Creating domain from', filename
3143    domain = pmesh_to_domain_instance(filename, Domain)
3144    if verbose == True:print "Number of triangles = ", len(domain)
3145
3146    domain.smooth = True
3147    domain.format = 'sww'   #Native netcdf visualisation format
3148    file_path, filename = path.split(filename)
3149    filename, ext = path.splitext(filename)
3150    domain.filename = filename
3151    domain.reduction = mean
3152    if verbose == True:print "file_path",file_path
3153    if file_path == "":file_path = "."
3154    domain.set_datadir(file_path)
3155
3156    if verbose == True:
3157        print "Output written to " + domain.get_datadir() + sep + \
3158              domain.filename + "." + domain.format
3159    sww = get_dataobject(domain)
3160    sww.store_connectivity()
3161    sww.store_timestep('stage')
3162
3163
def asc_csiro2sww(bath_dir,
                  elevation_dir,
                  ucur_dir,
                  vcur_dir,
                  sww_file,
                  minlat = None, maxlat = None,
                  minlon = None, maxlon = None,
                  zscale=1,
                  mean_stage = 0,
                  fail_on_NaN = True,
                  elevation_NaN_filler = 0,
                  bath_prefix='ba',
                  elevation_prefix='el',
                  verbose=False):
    """
    Produce an sww boundary file, from esri ascii data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    assume:
    All files are in esri ascii format

    4 types of information
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions
    The metadata of all the files is the same
    Each type is in a seperate directory
    One bath file with extention .000
    The time period is less than 24hrs and uniform.

    minlat/maxlat/minlon/maxlon: optional extent to cut out of the grids.
    zscale: scale factor applied to the elevation when computing stage.
    mean_stage: constant added to the scaled elevation to give stage.
    fail_on_NaN: if True, raise DataMissingValuesError when an elevation
        file contains its NODATA_value; otherwise substitute
        elevation_NaN_filler.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from coordinate_transforms.redfearn import redfearn

    precision = Float # So if we want to change the precision its done here

    # go in to the bath dir and load the only file,
    bath_files = os.listdir(bath_dir)
    #print "bath_files",bath_files

    #fixme: make more general?
    bath_file = bath_files[0]
    bath_dir_file =  bath_dir + os.sep + bath_file
    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
    #print "bath_metadata",bath_metadata
    #print "bath_grid",bath_grid

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    base_start = bath_file[-12:]

    #go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
                         + base_start
    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
    #print "elevation_dir_file",elevation_dir_file
    #print "elevation_grid", elevation_grid

    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)

    # the first elevation file should be the
    # file with the same base name as the bath data
    #print "elevation_files[0]",elevation_files[0]
    #print "'el' + base_start",'el' + base_start
    assert elevation_files[0] == 'el' + base_start

    #assert bath_metadata == elevation_metadata



    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    #number_of_times = len(os.listdir(elevation_dir))
    #number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    #Grid axes derived from the bathymetry header (degrees)
    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
                  for x in range(number_of_longitudes)]
    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
                 for y in range(number_of_latitudes)]

    # reverse order of lat, so the first lat represents the first grid row
    latitudes.reverse()

    #print "latitudes - before _get_min_max_indexes",latitudes
    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
                                                 minlat=minlat, maxlat=maxlat,
                                                 minlon=minlon, maxlon=maxlon)


    #Cut all grids and axes to the requested extent
    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
    #print "bath_grid",bath_grid
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
    #print "number_of_points",number_of_points

    #Work out the times from the file name suffixes (assumed to be
    #hours; converted to seconds)
    if len(elevation_files) > 1:
        # Assume: The time period is less than 24hrs.
        time_period = (int(elevation_files[1][-3:]) - \
                      int(elevation_files[0][-3:]))*60*60
        times = [x*time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]
    #print "times", times
    #print "number_of_latitudes", number_of_latitudes
    #print "number_of_longitudes", number_of_longitudes
    #print "number_of_times", number_of_times
    #print "latitudes", latitudes
    #print "longitudes", longitudes


    if verbose:
        print '------------------------------------------------'
        print 'Statistics:'
        print '  Extent (lat/lon):'
        print '    lat in [%f, %f], len(lat) == %d'\
              %(min(latitudes), max(latitudes),
                len(latitudes))
        print '    lon in [%f, %f], len(lon) == %d'\
              %(min(longitudes), max(longitudes),
                len(longitudes))
        print '    t in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))

    ######### WRITE THE SWW FILE #############
    # NetCDF file definition
    outfile = NetCDFFile(sww_file, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'


    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    #Start time in seconds since the epoch (midnight 1/1/1970)
    outfile.starttime = starttime = times[0]


    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)

    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    #Store
    from coordinate_transforms.redfearn import redfearn
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print 'Making triangular grid'
    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0],longitudes[0])

    #vertices maps (longitude index, latitude index) -> flat point index
    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat,lon)

            #NOTE(review): msg is a tuple here, and the zone assert
            #below is disabled -- zone crossings are silently accepted
            msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1


    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            #Note, this is different to the ferrit2sww code
            #since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) #Upper element
            volumes.append([v4,v2,v3]) #Lower element

    volumes = array(volumes)

    #Georeference against the south-west corner of the grid
    geo_ref = Geo_reference(refzone,min(x),min(y))
    geo_ref.write_NetCDF(outfile)

    # This will put the geo ref in the middle
    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)


    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (/lon):'
        print '    x in [%f, %f], len(lat) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(lon) == %d'\
              %(min(y), max(y),
                len(y))
        print 'geo_ref: ',geo_ref

    #Bathymetry doubles as both 'z' and 'elevation'
    z = resize(bath_grid,outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

    # do this to create an ok sww file.
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z
    # put the next line up in the code after outfile.order = 1
    #number_of_times = 1

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
           _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])

        #print "elevation_grid",elevation_grid
        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
        #print "elevation_grid",elevation_grid
        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue (missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                raise DataMissingValuesError, msg
            else:
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler


        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                #stage = scaled elevation + mean stage; momentum is
                #velocity * depth, where depth = stage - bathymetry
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()
3471
3472def _get_min_max_indexes(latitudes,longitudes,
3473                        minlat=None, maxlat=None,
3474                        minlon=None, maxlon=None):
3475    """
3476    return max, min indexes (for slicing) of the lat and long arrays to cover the area
3477    specified with min/max lat/long
3478
3479    Think of the latitudes and longitudes describing a 2d surface.
3480    The area returned is, if possible, just big enough to cover the
3481    inputed max/min area. (This will not be possible if the max/min area
3482    has a section outside of the latitudes/longitudes area.)
3483
3484    assume latitudess & longitudes are sorted,
3485    long - from low to high (west to east, eg 148 - 151)
3486    lat - from high to low (north to south, eg -35 - -38)
3487    """
3488
3489    # reverse order of lat, so it's in ascending order
3490    latitudes.reverse()
3491    largest_lat_index = len(latitudes)-1
3492    #Cut out a smaller extent.
3493    if minlat == None:
3494        lat_min_index = 0
3495    else:
3496        lat_min_index = searchsorted(latitudes, minlat)-1
3497        if lat_min_index <0:
3498            lat_min_index = 0
3499
3500
3501    if maxlat == None:
3502        lat_max_index = largest_lat_index #len(latitudes)
3503    else:
3504        lat_max_index = searchsorted(latitudes, maxlat)
3505        if lat_max_index > largest_lat_index:
3506            lat_max_index = largest_lat_index
3507
3508    if minlon == None:
3509        lon_min_index = 0
3510    else:
3511        lon_min_index = searchsorted(longitudes, minlon)-1
3512        if lon_min_index <0:
3513            lon_min_index = 0
3514
3515    if maxlon == None:
3516        lon_max_index = len(longitudes)
3517    else:
3518        lon_max_index = searchsorted(longitudes, maxlon)
3519
3520    #Take into account that the latitude list was reversed
3521    latitudes.reverse() # Python passes by reference, need to swap back
3522    lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3523                                   largest_lat_index - lat_min_index
3524    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3525    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3526
3527    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3528
3529
3530def _read_asc(filename, verbose=False):
3531    """Read esri file from the following ASCII format (.asc)
3532
3533    Example:
3534
3535    ncols         3121
3536    nrows         1800
3537    xllcorner     722000
3538    yllcorner     5893000
3539    cellsize      25
3540    NODATA_value  -9999
3541    138.3698 137.4194 136.5062 135.5558 ..........
3542
3543    """
3544
3545    datafile = open(filename)
3546
3547    if verbose: print 'Reading DEM from %s' %(filename)
3548    lines = datafile.readlines()
3549    datafile.close()
3550
3551    if verbose: print 'Got', len(lines), ' lines'
3552
3553    ncols = int(lines.pop(0).split()[1].strip())
3554    nrows = int(lines.pop(0).split()[1].strip())
3555    xllcorner = float(lines.pop(0).split()[1].strip())
3556    yllcorner = float(lines.pop(0).split()[1].strip())
3557    cellsize = float(lines.pop(0).split()[1].strip())
3558    NODATA_value = float(lines.pop(0).split()[1].strip())
3559
3560    assert len(lines) == nrows
3561
3562    #Store data
3563    grid = []
3564
3565    n = len(lines)
3566    for i, line in enumerate(lines):
3567        cells = line.split()
3568        assert len(cells) == ncols
3569        grid.append(array([float(x) for x in cells]))
3570    grid = array(grid)
3571
3572    return {'xllcorner':xllcorner,
3573            'yllcorner':yllcorner,
3574            'cellsize':cellsize,
3575            'NODATA_value':NODATA_value}, grid
3576
def sww2timeseries(swwfile,
                   gauge_filename,
                   gauge_data_outname,
                   quantity = None,
                   time_min = None,
                   time_max = None,
                   verbose = False):

    """Read SWW file and extract time series for prescribed quantities at
    gauge locations.

    The gauge locations are defined in gauge_filename. This file should be
    in the form: gaugename, easting, northing, and should be stored in a
    .csv or .xya file.

    Time series data at the gauges can be written to file (for example,
    Benfield requested raw data) with the default being no data written.

    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'depth'.

    The user can define a list of quantities. The possibilities are
    the conserved quantitues of the shallow water wave equation and other
    quantities which can be derived from those, i.e.
    ['depth', 'xmomentum', 'ymomentum', 'momentum', 'velocity', 'bearing'].

    Momentum is the absolute momentum, sqrt(xmomentum^2 + ymomentum^2).
    Note, units of momentum are m^2/s and depth is m.

    Velocity is absolute momentum divided by depth. (Confirming correct units:
    vel = abs mom / depth = (m^2/s)/m = m/s.)

    Bearing returns the angle of the velocity vector from North.

    If time_min and time_max is given, output plotted in that time range.
    The default is to plot the entire range of the time evident in sww file.

    The export graphic format in 'png' and will be stored in the same
    directory as the input file.

    NOTE(review): this function is unfinished.  'f' below is never
    assigned (the comment suggests f = file_function(swwfile) was
    intended), so calling it with time_min or time_max left as None
    raises NameError; no extraction or plotting is implemented yet.
    """

    if quantity is None: quantity = 'depth'

    # extract gauge locations from gauge file

    # extract all quantities from swwfile (f = file_function)
    # NOTE(review): 'f' is undefined here -- see docstring
    if time_min is None: time_min = min(f.T)
    if time_max is None: time_max = max(f.T)

    # loop through appropriate range of time
    # plot prescribed quantities and export data if requested

    #if gauge_data_outname is None:

    return
Note: See TracBrowser for help on using the repository browser.