source: inundation/pyvolution/data_manager.py @ 2877

1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using pyvolution
48
49"""

import exceptions
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import os

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around


from coordinate_transforms.geo_reference import Geo_reference


def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s


def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname

    # Test both read and write access (bitwise OR of the mode flags)
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return path

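# Example (sketch; the directory name is hypothetical):
#
#     datadir = check_dir('~/anuga_output', verbose=True)
#     # datadir now holds the verified path including the trailing
#     # separator, e.g. '/home/user/anuga_output/'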


def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN
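
# Example (sketch):
#
#     create_filename('.', 'runup', 'sww', size=100, time=3.14159)
#     # returns './runup_size100_time3.14.sww'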


def get_files(datadir, filename, format, size):
    """Get all files (names) with given name, size and format
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)  #Returns path with trailing separator

    #FIXME: create_filename writes sizes as '_size%d', not '_size=%d'
    pattern = dir + filename + '_size=%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], \
               "Mode %s must be either 'w' (write), 'r' (read) or 'a' (append)" %mode

        #Create filename
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))


        self.filename = create_filename(domain.get_datadir(),
            domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain


        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()

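    # A minimal sketch of reading the resulting .sww file back
    # (the file name 'runup.sww' is hypothetical):
    #
    #     from Scientific.IO.NetCDF import NetCDFFile
    #     fid = NetCDFFile('runup.sww', 'r')
    #     x = fid.variables['x'][:]            # vertex eastings
    #     stage = fid.variables['stage'][:]    # shape (timesteps, points)
    #     fid.close()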

    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        #minimum_allowed_depth = 0.001
        minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()



#Class for handling checkpoint data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()

    def store_checkpoint(self):
        """Write x,y coordinates and connectivity of the triangles
        constituting the mesh.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #Note: Z is not stored; the checkpoint file defines no static
        #elevation variable (elevation is stored per timestep like the
        #other quantities).

        volumes[:] = V

        #Close
        fid.close()

591
592    def store_timestep(self, name):
593        """Store time and named quantity to file
594        """
595        from Scientific.IO.NetCDF import NetCDFFile
596        from time import sleep
597
598        #Get NetCDF
599        retries = 0
600        file_open = False
601        while not file_open and retries < 10:
602            try:
603                fid = NetCDFFile(self.filename, 'a') #Open existing file
604            except IOError:
605                #This could happen if someone was reading the file.
606                #In that case, wait a while and try again
607                msg = 'Warning (store_timestep): File %s could not be opened'\
608                  %self.filename
609                msg += ' - trying again'
610                print msg
611                retries += 1
612                sleep(1)
613            else:
614                file_open = True
615
616        if not file_open:
617            msg = 'File %s could not be opened for append' %self.filename
618            raise DataFileNotOPenError, msg
619
620
621        domain = self.domain
622
623        # Get the variables
624        time = fid.variables['time']
625        stage = fid.variables['stage']
626        i = len(time)
627
628        #Store stage
629        time[i] = self.domain.time
630
631        # Get quantity
632        Q = domain.quantities[name]
633        A,V = Q.get_vertex_values(xy=False,
634                  precision = self.precision)
635
636        stage[i,:] = A.astype(self.precision)
637
638        #Flush and close
639        fid.sync()
640        fid.close()
641
642
643
644
645
#Function for storing xya output
#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface to ASCII format (.xya) for storing model output
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store_all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes are assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momenta) to file
        """
        pass


#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()

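# Example (sketch): write a single triangle as an obj file.
#
#     from Numeric import array, Float
#     x = array([[0.0, 1.0, 0.0]], Float)    # shape (M,3); here M == 1
#     y = array([[0.0, 0.0, 1.0]], Float)
#     z = array([[0.0, 0.0, 0.0]], Float)
#     write_obj('triangle', x, y, z)         # creates triangle.obj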

#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info
    #and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        #Note: glob already returns paths including data_dir
        lines = open(filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Could not extract time from filename %s' %filename



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

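# Example (sketch; file names are hypothetical):
#
#     filter_netcdf('big.sww', 'small.sww', first=0, step=10)
#     # copies every tenth timestep of big.sww into small.sww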

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

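# Sketch of typical use (assuming a domain object whose format attribute
# is 'sww'):
#
#     writer = get_dataobject(domain)    # instance of Data_format_sww
#     writer.store_connectivity()
#     writer.store_timestep(['stage', 'xmomentum', 'ymomentum'])
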
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get ASCII file
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)

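# Example (sketch; the base filename 'depths' is hypothetical):
#
#     xya2pts('depths', attribute_name='elevation',
#             z_func=lambda z: -z)    # e.g. turn depths into elevations
#     # creates depths.pts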


######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False):
    """Read Digital Elevation Model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

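# Example (sketch; the base filename 'topo' and the extents are
# illustrative only):
#
#     dem2pts('topo', easting_min=722000, easting_max=730000,
#             northing_min=5893000, northing_max=5900000,
#             use_cache=True, verbose=True)
#     # reads topo.dem and creates topo.pts clipped to the given extent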

def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation Model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose and nn > 0:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if line.strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if line.strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])




def hecras_cross_sections2pts(basename_in,
                              basename_out=None,
                              verbose=False):
    """Read HEC-RAS Elevation data from the following ASCII format (.sdf)

    Example:


# RAS export file created on Mon 15Aug2005 11:42
# by HEC-RAS Version 3.1.1

BEGIN HEADER:
  UNITS: METRIC
  DTM TYPE: TIN
  DTM: v:\1\cit\perth_topo\river_tin
  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
  MAP PROJECTION: UTM
  PROJECTION ZONE: 50
  DATUM: AGD66
  VERTICAL DATUM:
  NUMBER OF REACHES:  19
  NUMBER OF CROSS-SECTIONS:  14206
END HEADER:


Only the SURFACE LINE data of the following form will be utilised

  CROSS-SECTION:
    STREAM ID:Southern-Wungong
    REACH ID:Southern-Wungong
    STATION:19040.*
    CUT LINE:
      405548.671603161 , 6438142.7594925
      405734.536092045 , 6438326.10404912
      405745.130459356 , 6438331.48627354
      405813.89633823 , 6438368.6272789
    SURFACE LINE:
     405548.67,   6438142.76,   35.37
     405552.24,   6438146.28,   35.41
     405554.78,   6438148.78,   35.44
     405555.80,   6438149.79,   35.44
     405559.37,   6438153.31,   35.45
     405560.88,   6438154.81,   35.44
     405562.93,   6438156.83,   35.42
     405566.50,   6438160.35,   35.38
     405566.99,   6438160.83,   35.37
     ...
   END CROSS-SECTION

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get ASCII file
    infile = open(root + '.sdf', 'r')  #Open SDF file for read

    if verbose: print 'Reading HEC-RAS cross sections from %s' %(root + '.sdf')

    lines = infile.readlines()
    infile.close()

    if verbose: print 'Converting to pts format'

    i = 0
    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
        i += 1

    assert lines[i].strip().upper() == 'BEGIN HEADER:'
    i += 1

    assert lines[i].strip().upper().startswith('UNITS:')
    units = lines[i].strip().split()[1]
    i += 1

    assert lines[i].strip().upper().startswith('DTM TYPE:')
    i += 1

    assert lines[i].strip().upper().startswith('DTM:')
    i += 1

    assert lines[i].strip().upper().startswith('STREAM')
    i += 1

    assert lines[i].strip().upper().startswith('CROSS')
    i += 1

    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
    projection = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
    zone = int(lines[i].strip().split(':')[1])
    i += 1

    assert lines[i].strip().upper().startswith('DATUM:')
    datum = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
    number_of_cross_sections = int(lines[i].strip().split(':')[1])
    i += 1


    #Now read all points
    points = []
    elevation = []
    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
        for k, entry in enumerate(entries):
            points.append(entry[:2])
            elevation.append(entry[2])


    msg = 'Actual number_of_cross_sections == %d, reported as %d'\
          %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg

    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    #FIXME (DSG-ON): use loadASCII export_points_file
    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable ' +\
                          'storage of spatial point data derived from HEC-RAS'

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = 0.0
    outfile.yllcorner = 0.0
    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
    outfile.false_northing = 1000000

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    outfile.variables['points'][:] = points
    outfile.variables['elevation'][:] = elevation

    outfile.close()

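# Example (sketch; the base filename 'river' is hypothetical):
#
#     hecras_cross_sections2pts('river', verbose=True)
#     # reads river.sdf and creates river.pts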
1520
1521
1522def sww2dem(basename_in, basename_out = None,
1523            quantity = None,
1524            timestep = None,
1525            reduction = None,
1526            cellsize = 10,
1527            NODATA_value = -9999,
1528            easting_min = None,
1529            easting_max = None,
1530            northing_min = None,
1531            northing_max = None,
1532            expand_search = False, #To avoid intractable situations (This will be fixed when least_squares gets redesigned)
1533            verbose = False,
1534            origin = None,
1535            datum = 'WGS84',
1536        format = 'ers'):
1537
1538    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1539
1540    Example (ASC):
1541
1542    ncols         3121
1543    nrows         1800
1544    xllcorner     722000
1545    yllcorner     5893000
1546    cellsize      25
1547    NODATA_value  -9999
1548    138.3698 137.4194 136.5062 135.5558 ..........
1549
1550    Also write accompanying file with same basename_in but extension .prj
1551    used to fix the UTM zone, datum, false northings and eastings.
1552
1553    The prj format is assumed to be as
1554
1555    Projection    UTM
1556    Zone          56
1557    Datum         WGS84
1558    Zunits        NO
1559    Units         METERS
1560    Spheroid      WGS84
1561    Xshift        0.0000000000
1562    Yshift        10000000.0000000000
1563    Parameters
1564
1565
1566    The parameter quantity must be the name of an existing quantity or
1567    an expression involving existing quantities. The default is
1568    'elevation'.
1569
1570    if timestep (an index) is given, output quantity at that timestep
1571
1572    if reduction is given use that to reduce quantity over all timesteps.
1573
1574    datum
1575
1576    format can be either 'asc' or 'ers'
1577    """
1578
1579    import sys
1580    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1581    from Numeric import array2string
1582
1583    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1584    from util import apply_expression_to_dictionary
1585
1586    msg = 'Format must be either asc or ers'
1587    assert format.lower() in ['asc', 'ers'], msg
1588
1589
1590    false_easting = 500000
1591    false_northing = 10000000
1592
1593    if quantity is None:
1594        quantity = 'elevation'
1595
1596    if reduction is None:
1597        reduction = max
1598
1599    if basename_out is None:
1600        basename_out = basename_in + '_%s' %quantity
1601
1602    swwfile = basename_in + '.sww'
1603    demfile = basename_out + '.' + format
1604    # Note the use of a .ers extension is optional (write_ermapper_grid will
1605    # deal with either option
1606
1607    #Read sww file
1608    if verbose: print 'Reading from %s' %swwfile
1609    from Scientific.IO.NetCDF import NetCDFFile
1610    fid = NetCDFFile(swwfile)
1611
1612    #Get extent and reference
1613    x = fid.variables['x'][:]
1614    y = fid.variables['y'][:]
1615    volumes = fid.variables['volumes'][:]
1616
1617    number_of_timesteps = fid.dimensions['number_of_timesteps']
1618    number_of_points = fid.dimensions['number_of_points']
1619    if origin is None:
1620
1621        #Get geo_reference
1622        #sww files don't have to have a geo_ref
1623        try:
1624            geo_reference = Geo_reference(NetCDFObject=fid)
1625        except AttributeError, e:
1626            geo_reference = Geo_reference() #Default georef object
1627
1628        xllcorner = geo_reference.get_xllcorner()
1629        yllcorner = geo_reference.get_yllcorner()
1630        zone = geo_reference.get_zone()
1631    else:
1632        zone = origin[0]
1633        xllcorner = origin[1]
1634        yllcorner = origin[2]
1635
1636
1637
1638    #FIXME: Refactor using code from file_function.statistics
1639    #Something like print swwstats(swwname)
1640    if verbose:
1641        x = fid.variables['x'][:]
1642        y = fid.variables['y'][:]
1643        times = fid.variables['time'][:]
1644        print '------------------------------------------------'
1645        print 'Statistics of SWW file:'
1646        print '  Name: %s' %swwfile
1647        print '  Reference:'
1648        print '    Lower left corner: [%f, %f]'\
1649              %(xllcorner, yllcorner)
1650        print '    Start time: %f' %fid.starttime[0]
1651        print '  Extent:'
1652        print '    x [m] in [%f, %f], len(x) == %d'\
1653              %(min(x.flat), max(x.flat), len(x.flat))
1654        print '    y [m] in [%f, %f], len(y) == %d'\
1655              %(min(y.flat), max(y.flat), len(y.flat))
1656        print '    t [s] in [%f, %f], len(t) == %d'\
1657              %(min(times), max(times), len(times))
1658        print '  Quantities [SI units]:'
1659        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
1660            q = fid.variables[name][:].flat
1661            print '    %s in [%f, %f]' %(name, min(q), max(q))
1662
1663
1664
1665
1666
1667    #Get quantity and reduce if applicable
1668    if verbose: print 'Processing quantity %s' %quantity
1669
1670    #Turn NetCDF objects into Numeric arrays
1671    quantity_dict = {}
1672    for name in fid.variables.keys():
1673        quantity_dict[name] = fid.variables[name][:]
1674
1675
1676    q = apply_expression_to_dictionary(quantity, quantity_dict)
1677
1678
1679
1680    if len(q.shape) == 2:
1681        #q has a time component and needs to be reduced along
1682        #the temporal dimension
1683        if verbose: print 'Reducing quantity %s' %quantity
1684        q_reduced = zeros( number_of_points, Float )
1685
1686        for k in range(number_of_points):
1687            q_reduced[k] = reduction( q[:,k] )
1688
1689        q = q_reduced
1690
1691    #Post condition: Now q has dimension: number_of_points
1692    assert len(q.shape) == 1
1693    assert q.shape[0] == number_of_points
1694
1695
1696    if verbose:
1697        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1698
1699
1700    #Create grid and update xll/yll corner and x,y
1701
1702    #Relative extent
1703    if easting_min is None:
1704        xmin = min(x)
1705    else:
1706        xmin = easting_min - xllcorner
1707
1708    if easting_max is None:
1709        xmax = max(x)
1710    else:
1711        xmax = easting_max - xllcorner
1712
1713    if northing_min is None:
1714        ymin = min(y)
1715    else:
1716        ymin = northing_min - yllcorner
1717
1718    if northing_max is None:
1719        ymax = max(y)
1720    else:
1721        ymax = northing_max - yllcorner
1722
1723
1724
1725    if verbose: print 'Creating grid'
1726    ncols = int((xmax-xmin)/cellsize)+1
1727    nrows = int((ymax-ymin)/cellsize)+1
1728
1729
1730    #New absolute reference and coordinates
1731    newxllcorner = xmin+xllcorner
1732    newyllcorner = ymin+yllcorner
1733
1734    x = x+xllcorner-newxllcorner
1735    y = y+yllcorner-newyllcorner
1736
1737    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1738    assert len(vertex_points.shape) == 2
1739
1740
1741
1742    grid_points = zeros ( (ncols*nrows, 2), Float )
1743
1744
1745    for i in xrange(nrows):
1746        if format.lower() == 'asc':
1747            yg = i*cellsize
1748        else:
1749        #this will flip the order of the y values for ers
1750            yg = (nrows-i)*cellsize
1751
1752        for j in xrange(ncols):
1753            xg = j*cellsize
1754            k = i*ncols + j
1755
1756            grid_points[k,0] = xg
1757            grid_points[k,1] = yg
1758
1759    #Interpolate
1760    #from least_squares import Interpolation
1761    from fit_interpolate.interpolate import Interpolate
1762
1763
1764    interp = Interpolate(vertex_points, volumes, verbose = verbose)
1765
1766
1767
1768    #Interpolate using quantity values
1769    if verbose: print 'Interpolating'
1770    grid_values = interp.interpolate(q, grid_points).flat
1771
1772
1773    if verbose:
1774        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
1775                                                      max(grid_values))
1776
1777    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
1778    P = interp.mesh.get_boundary_polygon()
1779    outside_indices = outside_polygon(grid_points, P, closed=True)
1780
1781    for i in outside_indices:
1782        grid_values[i] = NODATA_value
1783
1784
1785
1786
1787    if format.lower() == 'ers':
1788        # setup ERS header information
1789        grid_values = reshape(grid_values,(nrows, ncols))
1790        header = {}
1791        header['datum'] = '"' + datum + '"'
1792        # FIXME The use of hardwired UTM and zone number needs to be made optional
1793        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
1794        header['projection'] = '"UTM-' + str(zone) + '"'
1795        header['coordinatetype'] = 'EN'
1796        if header['coordinatetype'] == 'LL':
1797            header['longitude'] = str(newxllcorner)
1798            header['latitude'] = str(newyllcorner)
1799        elif header['coordinatetype'] == 'EN':
1800            header['eastings'] = str(newxllcorner)
1801            header['northings'] = str(newyllcorner)
1802        header['nullcellvalue'] = str(NODATA_value)
1803        header['xdimension'] = str(cellsize)
1804        header['ydimension'] = str(cellsize)
1805        header['value'] = '"' + quantity + '"'
1806        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
1807
1808
1809        #Write
1810        if verbose: print 'Writing %s' %demfile
1811        import ermapper_grids
1812        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
1813
1814        fid.close()
1815    else:
1816        #Write to Ascii format
1817
1818        #Write prj file
1819        prjfile = basename_out + '.prj'
1820
1821        if verbose: print 'Writing %s' %prjfile
1822        prjid = open(prjfile, 'w')
1823        prjid.write('Projection    %s\n' %'UTM')
1824        prjid.write('Zone          %d\n' %zone)
1825        prjid.write('Datum         %s\n' %datum)
1826        prjid.write('Zunits        NO\n')
1827        prjid.write('Units         METERS\n')
1828        prjid.write('Spheroid      %s\n' %datum)
1829        prjid.write('Xshift        %d\n' %false_easting)
1830        prjid.write('Yshift        %d\n' %false_northing)
1831        prjid.write('Parameters\n')
1832        prjid.close()
1833
1834
1835
1836        if verbose: print 'Writing %s' %demfile
1837
1838        ascid = open(demfile, 'w')
1839
1840        ascid.write('ncols         %d\n' %ncols)
1841        ascid.write('nrows         %d\n' %nrows)
1842        ascid.write('xllcorner     %d\n' %newxllcorner)
1843        ascid.write('yllcorner     %d\n' %newyllcorner)
1844        ascid.write('cellsize      %f\n' %cellsize)
1845        ascid.write('NODATA_value  %d\n' %NODATA_value)
1846
1847
1848        #Get bounding polygon from mesh
1849        #P = interp.mesh.get_boundary_polygon()
1850        #inside_indices = inside_polygon(grid_points, P)
1851
1852        for i in range(nrows):
1853            if verbose and i%((nrows+10)/10)==0:
1854                print 'Doing row %d of %d' %(i, nrows)
1855
1856            base_index = (nrows-i-1)*ncols
1857
            row_values = grid_values[base_index:base_index+ncols]
            s = array2string(row_values, max_line_width=sys.maxint)
1860            ascid.write(s[1:-1] + '\n')
1861
1862
1863            #print
1864            #for j in range(ncols):
1865            #    index = base_index+j#
1866            #    print grid_values[index],
1867            #    ascid.write('%f ' %grid_values[index])
1868            #ascid.write('\n')
1869
1870        #Close
1871        ascid.close()
1872        fid.close()
1873
1874#Backwards compatibility
1875def sww2asc(basename_in, basename_out = None,
1876            quantity = None,
1877            timestep = None,
1878            reduction = None,
1879            cellsize = 10,
1880            verbose = False,
1881            origin = None):
    print 'sww2asc will soon be obsolete - please use sww2dem'
1883    sww2dem(basename_in,
1884            basename_out = basename_out,
1885            quantity = quantity,
1886            timestep = timestep,
1887            reduction = reduction,
1888            cellsize = cellsize,
1889            verbose = verbose,
1890            origin = origin,
            datum = 'WGS84',
            format = 'asc')
1893
1894def sww2ers(basename_in, basename_out = None,
1895            quantity = None,
1896            timestep = None,
1897            reduction = None,
1898            cellsize = 10,
1899            verbose = False,
1900            origin = None,
1901            datum = 'WGS84'):
    print 'sww2ers will soon be obsolete - please use sww2dem'
1903    sww2dem(basename_in,
1904            basename_out = basename_out,
1905            quantity = quantity,
1906            timestep = timestep,
1907            reduction = reduction,
1908            cellsize = cellsize,
1909            verbose = verbose,
1910            origin = origin,
            datum = datum,
            format = 'ers')
1913################################# END COMPATIBILITY ##############
1914
1915
1916
1917def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1918                                  use_cache = False,
1919                                  verbose = False):
1920    """Read Digitial Elevation model from the following ASCII format (.asc)
1921
1922    Example:
1923
1924    ncols         3121
1925    nrows         1800
1926    xllcorner     722000
1927    yllcorner     5893000
1928    cellsize      25
1929    NODATA_value  -9999
1930    138.3698 137.4194 136.5062 135.5558 ..........
1931
1932    Convert basename_in + '.asc' to NetCDF format (.dem)
1933    mimicking the ASCII format closely.
1934
1935
1936    An accompanying file with same basename_in but extension .prj must exist
1937    and is used to fix the UTM zone, datum, false northings and eastings.
1938
    The prj format is assumed to be as follows:
1940
1941    Projection    UTM
1942    Zone          56
1943    Datum         WGS84
1944    Zunits        NO
1945    Units         METERS
1946    Spheroid      WGS84
1947    Xshift        0.0000000000
1948    Yshift        10000000.0000000000
1949    Parameters
1950    """
1951
1952
1953
1954    kwargs = {'basename_out': basename_out, 'verbose': verbose}
1955
1956    if use_cache is True:
1957        from caching import cache
1958        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
1959                       dependencies = [basename_in + '.asc'],
1960                       verbose = verbose)
1961
1962    else:
1963        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
1964
1965    return result
1966
1967
1968
1969
1970
1971
1972def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1973                                  verbose = False):
1974    """Read Digitial Elevation model from the following ASCII format (.asc)
1975
1976    Internal function. See public function convert_dem_from_ascii2netcdf for details.
1977    """
1978
1979    import os
1980    from Scientific.IO.NetCDF import NetCDFFile
1981    from Numeric import Float, array
1982
1983    #root, ext = os.path.splitext(basename_in)
1984    root = basename_in
1985
1986    ###########################################
1987    # Read Meta data
    if verbose: print 'Reading METADATA from %s' %(root + '.prj')
1989    metadatafile = open(root + '.prj')
1990    metalines = metadatafile.readlines()
1991    metadatafile.close()
1992
1993    L = metalines[0].strip().split()
1994    assert L[0].strip().lower() == 'projection'
1995    projection = L[1].strip()                   #TEXT
1996
1997    L = metalines[1].strip().split()
1998    assert L[0].strip().lower() == 'zone'
1999    zone = int(L[1].strip())
2000
2001    L = metalines[2].strip().split()
2002    assert L[0].strip().lower() == 'datum'
2003    datum = L[1].strip()                        #TEXT
2004
2005    L = metalines[3].strip().split()
2006    assert L[0].strip().lower() == 'zunits'     #IGNORE
2007    zunits = L[1].strip()                       #TEXT
2008
2009    L = metalines[4].strip().split()
2010    assert L[0].strip().lower() == 'units'
2011    units = L[1].strip()                        #TEXT
2012
2013    L = metalines[5].strip().split()
2014    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2015    spheroid = L[1].strip()                     #TEXT
2016
2017    L = metalines[6].strip().split()
2018    assert L[0].strip().lower() == 'xshift'
2019    false_easting = float(L[1].strip())
2020
2021    L = metalines[7].strip().split()
2022    assert L[0].strip().lower() == 'yshift'
2023    false_northing = float(L[1].strip())
2024
2025    #print false_easting, false_northing, zone, datum
2026
2027
2028    ###########################################
2029    #Read DEM data
2030
2031    datafile = open(basename_in + '.asc')
2032
2033    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2034    lines = datafile.readlines()
2035    datafile.close()
2036
2037    if verbose: print 'Got', len(lines), ' lines'
2038
2039    ncols = int(lines[0].split()[1].strip())
2040    nrows = int(lines[1].split()[1].strip())
2041    xllcorner = float(lines[2].split()[1].strip())
2042    yllcorner = float(lines[3].split()[1].strip())
2043    cellsize = float(lines[4].split()[1].strip())
2044    NODATA_value = int(lines[5].split()[1].strip())
2045
2046    assert len(lines) == nrows + 6
2047
2048
2049    ##########################################
2050
2051
2052    if basename_out == None:
2053        netcdfname = root + '.dem'
2054    else:
2055        netcdfname = basename_out + '.dem'
2056
2057    if verbose: print 'Store to NetCDF file %s' %netcdfname
2058    # NetCDF file definition
2059    fid = NetCDFFile(netcdfname, 'w')
2060
2061    #Create new file
2062    fid.institution = 'Geoscience Australia'
2063    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2064                      'of spatial point data'
2065
2066    fid.ncols = ncols
2067    fid.nrows = nrows
2068    fid.xllcorner = xllcorner
2069    fid.yllcorner = yllcorner
2070    fid.cellsize = cellsize
2071    fid.NODATA_value = NODATA_value
2072
2073    fid.zone = zone
2074    fid.false_easting = false_easting
2075    fid.false_northing = false_northing
2076    fid.projection = projection
2077    fid.datum = datum
2078    fid.units = units
2079
2080
2081    # dimension definitions
2082    fid.createDimension('number_of_rows', nrows)
2083    fid.createDimension('number_of_columns', ncols)
2084
2085    # variable definitions
2086    fid.createVariable('elevation', Float, ('number_of_rows',
2087                                            'number_of_columns'))
2088
2089    # Get handles to the variables
2090    elevation = fid.variables['elevation']
2091
2092    #Store data
2093    n = len(lines[6:])
2094    for i, line in enumerate(lines[6:]):
2095        fields = line.split()
2096        if verbose and i%((n+10)/10)==0:
2097            print 'Processing row %d of %d' %(i, nrows)
2098
2099        elevation[i, :] = array([float(x) for x in fields])
2100
2101    fid.close()
2102
2103
2104
2105
2106
2107def ferret2sww(basename_in, basename_out = None,
2108               verbose = False,
2109               minlat = None, maxlat = None,
2110               minlon = None, maxlon = None,
2111               mint = None, maxt = None, mean_stage = 0,
2112               origin = None, zscale = 1,
2113               fail_on_NaN = True,
2114               NaN_filler = 0,
2115               elevation = None,
2116               inverted_bathymetry = False
               ): #FIXME: Bathymetry should be obtained
                  #from MOST somehow.
                  #Alternatively from elsewhere
                  #or, as a last resort,
                  #specified here.
                  #The value of -100 will work
                  #for the Wollongong tsunami
                  #scenario but is very hacky
2125    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2126    sww format native to pyvolution.
2127
2128    Specify only basename_in and read files of the form
2129    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2130    relative height, x-velocity and y-velocity, respectively.
2131
2132    Also convert latitude and longitude to UTM. All coordinates are
2133    assumed to be given in the GDA94 datum.
2134
    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees
2138
2139    origin is a 3-tuple with geo referenced
2140    UTM coordinates (zone, easting, northing)
2141
2142    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2143    which means that longitude is the fastest
2144    varying dimension (row major order, so to speak)
2145
2146    ferret2sww uses grid points as vertices in a triangular grid
2147    counting vertices from lower left corner upwards, then right
2148    """
2149
2150    import os
2151    from Scientific.IO.NetCDF import NetCDFFile
2152    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2153    from Numeric import allclose, around
2154
2155    precision = Float
2156
2157    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2158
2159    if minlat != None:
2160        assert -90 < minlat < 90 , msg
2161    if maxlat != None:
2162        assert -90 < maxlat < 90 , msg
2163    if minlon != None:
2164        assert -180 < minlon < 180 , msg
2165    if maxlon != None:
2166        assert -180 < maxlon < 180 , msg
2167
2168
2169    #Get NetCDF data
2170    if verbose: print 'Reading files %s_*.nc' %basename_in
2171    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2172    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2173    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2174    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2175
2176    if basename_out is None:
2177        swwname = basename_in + '.sww'
2178    else:
2179        swwname = basename_out + '.sww'
2180
    # Get dimensions of file_h
2182    for dimension in file_h.dimensions.keys():
2183        if dimension[:3] == 'LON':
2184            dim_h_longitude = dimension
2185        if dimension[:3] == 'LAT':
2186            dim_h_latitude = dimension
2187        if dimension[:4] == 'TIME':
2188            dim_h_time = dimension
2189
    #print 'long:', dim_h_longitude
    #print 'lats:', dim_h_latitude
    #print 'times:', dim_h_time
2193
2194    times = file_h.variables[dim_h_time]
2195    latitudes = file_h.variables[dim_h_latitude]
2196    longitudes = file_h.variables[dim_h_longitude]
2197   
    # Get dimensions for file_e
2199    for dimension in file_e.dimensions.keys():
2200        if dimension[:3] == 'LON':
2201            dim_e_longitude = dimension
2202        if dimension[:3] == 'LAT':
2203            dim_e_latitude = dimension
2204        if dimension[:4] == 'TIME':
2205            dim_e_time = dimension
2206
    # Get dimensions for file_u
2208    for dimension in file_u.dimensions.keys():
2209        if dimension[:3] == 'LON':
2210            dim_u_longitude = dimension
2211        if dimension[:3] == 'LAT':
2212            dim_u_latitude = dimension
2213        if dimension[:4] == 'TIME':
2214            dim_u_time = dimension
2215           
    # Get dimensions for file_v
2217    for dimension in file_v.dimensions.keys():
2218        if dimension[:3] == 'LON':
2219            dim_v_longitude = dimension
2220        if dimension[:3] == 'LAT':
2221            dim_v_latitude = dimension
2222        if dimension[:4] == 'TIME':
2223            dim_v_time = dimension
2224
2225
    #Precision used by MOST for lat/lon is 4 or 5 decimals
2227    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2228    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2229
2230    #Check that files are compatible
2231    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2232    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2233    assert allclose(latitudes, e_lat)
2234
2235    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2236    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2237    assert allclose(longitudes, e_lon)
2238
2239    if mint == None:
2240        jmin = 0
2241    else:
2242        jmin = searchsorted(times, mint)
2243
2244    if maxt == None:
2245        jmax=len(times)
2246    else:
2247        jmax = searchsorted(times, maxt)
2248
2249    if minlat == None:
2250        kmin=0
2251    else:
2252        kmin = searchsorted(latitudes, minlat)
2253
2254    if maxlat == None:
2255        kmax = len(latitudes)
2256    else:
2257        kmax = searchsorted(latitudes, maxlat)
2258
2259    if minlon == None:
2260        lmin=0
2261    else:
2262        lmin = searchsorted(longitudes, minlon)
2263
2264    if maxlon == None:
2265        lmax = len(longitudes)
2266    else:
2267        lmax = searchsorted(longitudes, maxlon)
2268
    #print 'j', jmin, jmax
2270    times = times[jmin:jmax]
2271    latitudes = latitudes[kmin:kmax]
2272    longitudes = longitudes[lmin:lmax]
2273
2274
2275    if verbose: print 'cropping'
2276    zname = 'ELEVATION'
2277
2278    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2279    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2280    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2281    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2282
2283    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2284    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2285    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2286    #        from Numeric import asarray
2287    #        elevations=elevations.tolist()
2288    #        elevations.reverse()
2289    #        elevations=asarray(elevations)
2290    #    else:
2291    #        from Numeric import asarray
2292    #        elevations=elevations.tolist()
2293    #        elevations.reverse()
2294    #        elevations=asarray(elevations)
2295    #        'print hmmm'
2296
2297
2298
2299    #Get missing values
2300    nan_ha = file_h.variables['HA'].missing_value[0]
2301    nan_ua = file_u.variables['UA'].missing_value[0]
2302    nan_va = file_v.variables['VA'].missing_value[0]
2303    if hasattr(file_e.variables[zname],'missing_value'):
2304        nan_e  = file_e.variables[zname].missing_value[0]
2305    else:
2306        nan_e = None
2307
2308    #Cleanup
2309    from Numeric import sometrue
2310
2311    missing = (amplitudes == nan_ha)
2312    if sometrue (missing):
2313        if fail_on_NaN:
2314            msg = 'NetCDFFile %s contains missing values'\
2315                  %(basename_in+'_ha.nc')
2316            raise DataMissingValuesError, msg
2317        else:
2318            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
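            #Masking trick: (missing==0) zeroes out the flagged entries and
            #missing*NaN_filler puts NaN_filler in exactly those places.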
2319
2320    missing = (uspeed == nan_ua)
2321    if sometrue (missing):
2322        if fail_on_NaN:
2323            msg = 'NetCDFFile %s contains missing values'\
2324                  %(basename_in+'_ua.nc')
2325            raise DataMissingValuesError, msg
2326        else:
2327            uspeed = uspeed*(missing==0) + missing*NaN_filler
2328
2329    missing = (vspeed == nan_va)
2330    if sometrue (missing):
2331        if fail_on_NaN:
2332            msg = 'NetCDFFile %s contains missing values'\
2333                  %(basename_in+'_va.nc')
2334            raise DataMissingValuesError, msg
2335        else:
2336            vspeed = vspeed*(missing==0) + missing*NaN_filler
2337
2338
2339    missing = (elevations == nan_e)
2340    if sometrue (missing):
2341        if fail_on_NaN:
2342            msg = 'NetCDFFile %s contains missing values'\
2343                  %(basename_in+'_e.nc')
2344            raise DataMissingValuesError, msg
2345        else:
2346            elevations = elevations*(missing==0) + missing*NaN_filler
2347
2348    #######
2349
2350
2351
2352    number_of_times = times.shape[0]
2353    number_of_latitudes = latitudes.shape[0]
2354    number_of_longitudes = longitudes.shape[0]
2355
2356    assert amplitudes.shape[0] == number_of_times
2357    assert amplitudes.shape[1] == number_of_latitudes
2358    assert amplitudes.shape[2] == number_of_longitudes
2359
2360    if verbose:
2361        print '------------------------------------------------'
2362        print 'Statistics:'
2363        print '  Extent (lat/lon):'
2364        print '    lat in [%f, %f], len(lat) == %d'\
2365              %(min(latitudes.flat), max(latitudes.flat),
2366                len(latitudes.flat))
2367        print '    lon in [%f, %f], len(lon) == %d'\
2368              %(min(longitudes.flat), max(longitudes.flat),
2369                len(longitudes.flat))
2370        print '    t in [%f, %f], len(t) == %d'\
2371              %(min(times.flat), max(times.flat), len(times.flat))
2372
        q = amplitudes.flat
        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = uspeed.flat
        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = vspeed.flat
        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = elevations.flat
        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2388
2389
2390    #print number_of_latitudes, number_of_longitudes
2391    number_of_points = number_of_latitudes*number_of_longitudes
2392    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2393
2394
2395    file_h.close()
2396    file_u.close()
2397    file_v.close()
2398    file_e.close()
2399
2400
2401    # NetCDF file definition
2402    outfile = NetCDFFile(swwname, 'w')
2403
2404    #Create new file
2405    outfile.institution = 'Geoscience Australia'
2406    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2407                          %(basename_in + '_ha.nc',
2408                            basename_in + '_ua.nc',
2409                            basename_in + '_va.nc',
2410                            basename_in + '_e.nc')
2411
2412
2413    #For sww compatibility
2414    outfile.smoothing = 'Yes'
2415    outfile.order = 1
2416
2417    #Start time in seconds since the epoch (midnight 1/1/1970)
2418    outfile.starttime = starttime = times[0]
2419    times = times - starttime  #Store relative times
2420
2421    # dimension definitions
2422    outfile.createDimension('number_of_volumes', number_of_volumes)
2423
2424    outfile.createDimension('number_of_vertices', 3)
2425    outfile.createDimension('number_of_points', number_of_points)
2426
2427
    outfile.createDimension('number_of_timesteps', len(times))
2430
2431    # variable definitions
2432    outfile.createVariable('x', precision, ('number_of_points',))
2433    outfile.createVariable('y', precision, ('number_of_points',))
2434    outfile.createVariable('elevation', precision, ('number_of_points',))
2435
2436    #FIXME: Backwards compatibility
2437    outfile.createVariable('z', precision, ('number_of_points',))
2438    #################################
2439
2440    outfile.createVariable('volumes', Int, ('number_of_volumes',
2441                                            'number_of_vertices'))
2442
2443    outfile.createVariable('time', precision,
2444                           ('number_of_timesteps',))
2445
2446    outfile.createVariable('stage', precision,
2447                           ('number_of_timesteps',
2448                            'number_of_points'))
2449
2450    outfile.createVariable('xmomentum', precision,
2451                           ('number_of_timesteps',
2452                            'number_of_points'))
2453
2454    outfile.createVariable('ymomentum', precision,
2455                           ('number_of_timesteps',
2456                            'number_of_points'))
2457
2458
2459    #Store
2460    from coordinate_transforms.redfearn import redfearn
2461    x = zeros(number_of_points, Float)  #Easting
2462    y = zeros(number_of_points, Float)  #Northing
2463
2464
2465    if verbose: print 'Making triangular grid'
2466    #Check zone boundaries
2467    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2468
2469    vertices = {}
2470    i = 0
2471    for k, lat in enumerate(latitudes):       #Y direction
2472        for l, lon in enumerate(longitudes):  #X direction
2473
2474            vertices[l,k] = i
2475
2476            zone, easting, northing = redfearn(lat,lon)
2477
            msg = 'Zone boundary crossed at longitude = %s' %lon
2479            #assert zone == refzone, msg
2480            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2481            x[i] = easting
2482            y[i] = northing
2483            i += 1
2484
2485
2486    #Construct 2 triangles per 'rectangular' element
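    # Assumed grid-cell layout (hedged sketch; k indexes latitude,
    # l indexes longitude):
    #
    #   v1 --- v3
    #   |     / |      upper element: [v1, v2, v3]
    #   |    /  |      lower element: [v4, v3, v2]
    #   v2 --- v4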
2487    volumes = []
2488    for l in range(number_of_longitudes-1):    #X direction
2489        for k in range(number_of_latitudes-1): #Y direction
2490            v1 = vertices[l,k+1]
2491            v2 = vertices[l,k]
2492            v3 = vertices[l+1,k+1]
2493            v4 = vertices[l+1,k]
2494
2495            volumes.append([v1,v2,v3]) #Upper element
2496            volumes.append([v4,v3,v2]) #Lower element
2497
2498    volumes = array(volumes)
2499
2500    if origin == None:
2501        zone = refzone
2502        xllcorner = min(x)
2503        yllcorner = min(y)
2504    else:
2505        zone = origin[0]
2506        xllcorner = origin[1]
2507        yllcorner = origin[2]
2508
2509
2510    outfile.xllcorner = xllcorner
2511    outfile.yllcorner = yllcorner
2512    outfile.zone = zone
2513
2514
2515    if elevation is not None:
2516        z = elevation
2517    else:
2518        if inverted_bathymetry:
2519            z = -1*elevations
2520        else:
2521            z = elevations
2522    #FIXME: z should be obtained from MOST and passed in here
2523
2524    from Numeric import resize
2525    z = resize(z,outfile.variables['z'][:].shape)
2526    outfile.variables['x'][:] = x - xllcorner
2527    outfile.variables['y'][:] = y - yllcorner
2528    outfile.variables['z'][:] = z             #FIXME HACK
2529    outfile.variables['elevation'][:] = z
2530    outfile.variables['time'][:] = times   #Store time relative
2531    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2532
2533
2534
2535    #Time stepping
2536    stage = outfile.variables['stage']
2537    xmomentum = outfile.variables['xmomentum']
2538    ymomentum = outfile.variables['ymomentum']
2539
2540    if verbose: print 'Converting quantities'
2541    n = len(times)
2542    for j in range(n):
2543        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2544        i = 0
2545        for k in range(number_of_latitudes):      #Y direction
2546            for l in range(number_of_longitudes): #X direction
2547                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2548                stage[j,i] = w
2549                h = w - z[i]
2550                xmomentum[j,i] = uspeed[j,k,l]/100*h
2551                ymomentum[j,i] = vspeed[j,k,l]/100*h
2552                i += 1
2553
2554    #outfile.close()
2555
2556    #FIXME: Refactor using code from file_function.statistics
2557    #Something like print swwstats(swwname)
2558    if verbose:
2559        x = outfile.variables['x'][:]
2560        y = outfile.variables['y'][:]
2561        times = outfile.variables['time'][:]
2562        print '------------------------------------------------'
2563        print 'Statistics of output file:'
2564        print '  Name: %s' %swwname
2565        print '  Reference:'
2566        print '    Lower left corner: [%f, %f]'\
2567              %(xllcorner, yllcorner)
2568        print '    Start time: %f' %starttime
2569        print '  Extent:'
2570        print '    x [m] in [%f, %f], len(x) == %d'\
2571              %(min(x.flat), max(x.flat), len(x.flat))
2572        print '    y [m] in [%f, %f], len(y) == %d'\
2573              %(min(y.flat), max(y.flat), len(y.flat))
2574        print '    t [s] in [%f, %f], len(t) == %d'\
2575              %(min(times), max(times), len(times))
2576        print '  Quantities [SI units]:'
2577        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2578            q = outfile.variables[name][:].flat
2579            print '    %s in [%f, %f]' %(name, min(q), max(q))
2580
2581
2582
2583    outfile.close()
2584
2585
2586
2587
2588
2589def timefile2netcdf(filename, quantity_names = None):
2590    """Template for converting typical text files with time series to
2591    NetCDF tms file.
2592
2593
    The file format is assumed to be two fields separated by a comma:
2595
2596        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
2597
2598    E.g
2599
2600      31/08/04 00:00:00, 1.328223 0 0
2601      31/08/04 00:15:00, 1.292912 0 0
2602
2603    will provide a time dependent function f(t) with three attributes
2604
    filename is assumed to be the rootname with extensions .txt and .tms
2606    """
2607
2608    import time, calendar
2609    from Numeric import array
2610    from config import time_format
2611    from utilities.numerical_tools import ensure_numeric
2612
2613
2614
2615    fid = open(filename + '.txt')
2616    line = fid.readline()
2617    fid.close()
2618
2619    fields = line.split(',')
    msg = 'File %s must have the format: date, value0 value1 value2 ...' %filename
2621    assert len(fields) == 2, msg
2622
2623    try:
2624        starttime = calendar.timegm(time.strptime(fields[0], time_format))
2625    except ValueError:
2626        msg = 'First field in file %s must be' %filename
2627        msg += ' date-time with format %s.\n' %time_format
2628        msg += 'I got %s instead.' %fields[0]
2629        raise DataTimeError, msg
2630
2631
2632    #Split values
2633    values = []
2634    for value in fields[1].split():
2635        values.append(float(value))
2636
2637    q = ensure_numeric(values)
2638
2639    msg = 'ERROR: File must contain at least one independent value'
2640    assert len(q.shape) == 1, msg
2641
2642
2643
2644    #Read times proper
2645    from Numeric import zeros, Float, alltrue
2646    from config import time_format
2647    import time, calendar
2648
2649    fid = open(filename + '.txt')
2650    lines = fid.readlines()
2651    fid.close()
2652
2653    N = len(lines)
2654    d = len(q)
2655
2656    T = zeros(N, Float)       #Time
2657    Q = zeros((N, d), Float)  #Values
2658
2659    for i, line in enumerate(lines):
2660        fields = line.split(',')
2661        realtime = calendar.timegm(time.strptime(fields[0], time_format))
2662
2663        T[i] = realtime - starttime
2664
2665        for j, value in enumerate(fields[1].split()):
2666            Q[i, j] = float(value)
2667
    msg = 'File %s must list time as a monotonically ' %filename
2669    msg += 'increasing sequence'
2670    assert alltrue( T[1:] - T[:-1] > 0 ), msg
2671
2672
2673    #Create NetCDF file
2674    from Scientific.IO.NetCDF import NetCDFFile
2675
2676    fid = NetCDFFile(filename + '.tms', 'w')
2677
2678
2679    fid.institution = 'Geoscience Australia'
2680    fid.description = 'Time series'
2681
2682
2683    #Reference point
2684    #Start time in seconds since the epoch (midnight 1/1/1970)
2685    #FIXME: Use Georef
2686    fid.starttime = starttime
2687
2688
2689    # dimension definitions
2690    #fid.createDimension('number_of_volumes', self.number_of_volumes)
2691    #fid.createDimension('number_of_vertices', 3)
2692
2693
2694    fid.createDimension('number_of_timesteps', len(T))
2695
2696    fid.createVariable('time', Float, ('number_of_timesteps',))
2697
2698    fid.variables['time'][:] = T
2699
2700    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            name = 'Attribute%d' %i
2705
2706        fid.createVariable(name, Float, ('number_of_timesteps',))
2707        fid.variables[name][:] = Q[:,i]
2708
2709    fid.close()
2710
2711
2712def extent_sww(file_name):
2713    """
2714    Read in an sww file.
2715
2716    Input;
2717    file_name - the sww file
2718
2719    Output;
2720    z - Vector of bed elevation
2721    volumes - Array.  Each row has 3 values, representing
2722    the vertices that define the volume
2723    time - Vector of the times where there is stage information
2724    stage - array with respect to time and vertices (x,y)
2725    """
2726
2727
2728    from Scientific.IO.NetCDF import NetCDFFile
2729
2730    #Check contents
2731    #Get NetCDF
2732    fid = NetCDFFile(file_name, 'r')
2733
2734    # Get the variables
2735    x = fid.variables['x'][:]
2736    y = fid.variables['y'][:]
2737    stage = fid.variables['stage'][:]
2738    #print "stage",stage
2739    #print "stage.shap",stage.shape
2740    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
2741    #print "min(stage)",min(stage)
2742
2743    fid.close()
2744
2745    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
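
# Hedged usage sketch (hypothetical filename):
#
#     xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('model.sww')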
2746
2747
def sww2domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
    """
    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.
    """
    #Value used to flag missing data (the NetCDF default fill value)
    NaN = 9.969209968386869e+036
2761
2762    from Scientific.IO.NetCDF import NetCDFFile
2763    from shallow_water import Domain
2764    from Numeric import asarray, transpose, resize
2765
2766    if verbose: print 'Reading from ', filename
2767    fid = NetCDFFile(filename, 'r')    #Open existing file for read
2768    time = fid.variables['time']       #Timesteps
2769    if t is None:
2770        t = time[-1]
2771    time_interp = get_time_interp(time,t)
2772
2773    # Get the variables as Numeric arrays
2774    x = fid.variables['x'][:]             #x-coordinates of vertices
2775    y = fid.variables['y'][:]             #y-coordinates of vertices
2776    elevation = fid.variables['elevation']     #Elevation
2777    stage = fid.variables['stage']     #Water level
2778    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
2779    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
2780
2781    starttime = fid.starttime[0]
2782    volumes = fid.variables['volumes'][:] #Connectivity
2783    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
2784
2785    conserved_quantities = []
2786    interpolated_quantities = {}
2787    other_quantities = []
2788
2789    # get geo_reference
2790    #sww files don't have to have a geo_ref
2791    try:
2792        geo_reference = Geo_reference(NetCDFObject=fid)
2793    except: #AttributeError, e:
2794        geo_reference = None
2795
2796    if verbose: print '    getting quantities'
2797    for quantity in fid.variables.keys():
2798        dimensions = fid.variables[quantity].dimensions
2799        if 'number_of_timesteps' in dimensions:
2800            conserved_quantities.append(quantity)
2801            interpolated_quantities[quantity]=\
2802                  interpolated_quantity(fid.variables[quantity][:],time_interp)
2803        else: other_quantities.append(quantity)
2804
2805    other_quantities.remove('x')
2806    other_quantities.remove('y')
2807    other_quantities.remove('z')
2808    other_quantities.remove('volumes')
2809
2810    conserved_quantities.remove('time')
2811
2812    if verbose: print '    building domain'
2813    #    From domain.Domain:
2814    #    domain = Domain(coordinates, volumes,\
2815    #                    conserved_quantities = conserved_quantities,\
2816    #                    other_quantities = other_quantities,zone=zone,\
2817    #                    xllcorner=xllcorner, yllcorner=yllcorner)
2818
2819    #   From shallow_water.Domain:
2820    coordinates=coordinates.tolist()
2821    volumes=volumes.tolist()
2822    #FIXME:should this be in mesh?(peter row)
2823    if fid.smoothing == 'Yes': unique = False
2824    else: unique = True
2825    if unique:
2826        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
2827
2828
2829    try:
2830        domain = Domain(coordinates, volumes, boundary)
2831    except AssertionError, e:
2832        fid.close()
2833        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
2834        raise DataDomainError, msg
2835
2836    if not boundary is None:
2837        domain.boundary = boundary
2838
2839    domain.geo_reference = geo_reference
2840
2841    domain.starttime=float(starttime)+float(t)
2842    domain.time=0.0
2843
2844    for quantity in other_quantities:
2845        try:
2846            NaN = fid.variables[quantity].missing_value
2847        except:
2848            pass #quantity has no missing_value number
2849        X = fid.variables[quantity][:]
2850        if very_verbose:
2851            print '       ',quantity
2852            print '        NaN =',NaN
2853            print '        max(X)'
2854            print '       ',max(X)
2855            print '        max(X)==NaN'
2856            print '       ',max(X)==NaN
2857            print ''
2858        if (max(X)==NaN) or (min(X)==NaN):
2859            if fail_if_NaN:
2860                msg = 'quantity "%s" contains no_data entry'%quantity
2861                raise DataMissingValuesError, msg
2862            else:
                data = (X != NaN)
2864                X = (X*data)+(data==0)*NaN_filler
2865        if unique:
2866            X = resize(X,(len(X)/3,3))
2867        domain.set_quantity(quantity,X)
2868    #
2869    for quantity in conserved_quantities:
2870        try:
2871            NaN = fid.variables[quantity].missing_value
2872        except:
2873            pass #quantity has no missing_value number
2874        X = interpolated_quantities[quantity]
2875        if very_verbose:
2876            print '       ',quantity
2877            print '        NaN =',NaN
2878            print '        max(X)'
2879            print '       ',max(X)
2880            print '        max(X)==NaN'
2881            print '       ',max(X)==NaN
2882            print ''
2883        if (max(X)==NaN) or (min(X)==NaN):
2884            if fail_if_NaN:
2885                msg = 'quantity "%s" contains no_data entry'%quantity
2886                raise DataMissingValuesError, msg
2887            else:
                data = (X != NaN)
2889                X = (X*data)+(data==0)*NaN_filler
2890        if unique:
2891            X = resize(X,(X.shape[0]/3,3))
2892        domain.set_quantity(quantity,X)
2893
2894    fid.close()
2895    return domain
2896
def interpolated_quantity(saved_quantity, time_interp):
    """Given an index and ratio, interpolate quantity with respect to time."""

    index, ratio = time_interp
2901    Q = saved_quantity
2902    if ratio > 0:
2903        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
2904    else:
2905        q = Q[index]
2906    #Return vector of interpolated values
2907    return q
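
# Worked example (hedged sketch): with saved_quantity == array([3.0, 5.0])
# (one value per timestep) and time_interp == (0, 0.5), the result is
# (1-0.5)*3.0 + 0.5*5.0 == 4.0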
2908
2909def get_time_interp(time,t=None):
2910    #Finds the ratio and index for time interpolation.
2911    #It is borrowed from previous pyvolution code.
2912    if t is None:
2913        t=time[-1]
2914        index = -1
2915        ratio = 0.
2916    else:
2917        T = time
2918        tau = t
2919        index=0
2920        msg = 'Time interval derived from file %s [%s:%s]'\
2921            %('FIXMEfilename', T[0], T[-1])
2922        msg += ' does not match model time: %s' %tau
2923        if tau < time[0]: raise DataTimeError, msg
2924        if tau > time[-1]: raise DataTimeError, msg
2925        while tau > time[index]: index += 1
2926        while tau < time[index]: index -= 1
2927        if tau == time[index]:
2928            #Protect against case where tau == time[-1] (last time)
2929            # - also works in general when tau == time[i]
2930            ratio = 0
2931        else:
2932            #t is now between index and index+1
2933            ratio = (tau - time[index])/(time[index+1] - time[index])
2934    return (index,ratio)
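
# Worked example (hedged sketch): for time == [0., 10., 20.] and t == 15.,
# get_time_interp returns (1, 0.5), i.e. halfway between time[1] and time[2].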
2935
2936
2937def weed(coordinates,volumes,boundary = None):
    #Convert Numeric arrays to lists
    if type(coordinates).__name__ == 'array':
        coordinates = coordinates.tolist()
    if type(volumes).__name__ == 'array':
        volumes = volumes.tolist()
2942
2943    unique = False
2944    point_dict = {}
2945    same_point = {}
2946    for i in range(len(coordinates)):
2947        point = tuple(coordinates[i])
2948        if point_dict.has_key(point):
2949            unique = True
2950            same_point[i]=point
2951            #to change all point i references to point j
2952        else:
2953            point_dict[point]=i
2954            same_point[i]=point
2955
2956    coordinates = []
2957    i = 0
2958    for point in point_dict.keys():
2959        point = tuple(point)
2960        coordinates.append(list(point))
2961        point_dict[point]=i
2962        i+=1
2963
2964
2965    for volume in volumes:
2966        for i in range(len(volume)):
2967            index = volume[i]
2968            if index>-1:
2969                volume[i]=point_dict[same_point[index]]
2970
2971    new_boundary = {}
2972    if not boundary is None:
2973        for segment in boundary.keys():
2974            point0 = point_dict[same_point[segment[0]]]
2975            point1 = point_dict[same_point[segment[1]]]
2976            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')? (peter row)
2979
2980            if new_boundary.has_key((point0,point1)):
2981                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
2982                                              #+','+label
2983
2984            elif new_boundary.has_key((point1,point0)):
2985                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
2986                                              #+','+label
2987            else: new_boundary[(point0,point1)]=label
2988
2989        boundary = new_boundary
2990
2991    return coordinates,volumes,boundary
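
# Hedged sketch of what weed does: given
#
#     coordinates = [[0.,0.], [1.,0.], [0.,0.]]
#     volumes = [[0, 1, 2]]
#
# points 0 and 2 coincide, so weed returns two unique coordinates (in
# arbitrary dictionary order) and the volume re-indexed so that its first
# and third vertices refer to the same point.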
2992
2993
2994def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
2995                 verbose=False):
2996    """Read Digitial Elevation model from the following NetCDF format (.dem)
2997
2998    Example:
2999
3000    ncols         3121
3001    nrows         1800
3002    xllcorner     722000
3003    yllcorner     5893000
3004    cellsize      25
3005    NODATA_value  -9999
3006    138.3698 137.4194 136.5062 135.5558 ..........
3007
3008    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3009    """
3010
3011    import os
3012    from Scientific.IO.NetCDF import NetCDFFile
3013    from Numeric import Float, zeros, sum, reshape, equal
3014
3015    root = basename_in
3016    inname = root + '.dem'
3017
3018    #Open existing netcdf file to read
3019    infile = NetCDFFile(inname, 'r')
3020    if verbose: print 'Reading DEM from %s' %inname
3021
3022    #Read metadata
3023    ncols = infile.ncols[0]
3024    nrows = infile.nrows[0]
3025    xllcorner = infile.xllcorner[0]
3026    yllcorner = infile.yllcorner[0]
3027    cellsize = infile.cellsize[0]
3028    NODATA_value = infile.NODATA_value[0]
3029    zone = infile.zone[0]
3030    false_easting = infile.false_easting[0]
3031    false_northing = infile.false_northing[0]
3032    projection = infile.projection
3033    datum = infile.datum
3034    units = infile.units
3035
3036    dem_elevation = infile.variables['elevation']
3037
3038    #Get output file name
3039    if basename_out == None:
3040        outname = root + '_' + repr(cellsize_new) + '.dem'
3041    else:
3042        outname = basename_out + '.dem'
3043
3044    if verbose: print 'Write decimated NetCDF file to %s' %outname
3045
3046    #Determine some dimensions for decimated grid
3047    (nrows_stencil, ncols_stencil) = stencil.shape
3048    x_offset = ncols_stencil / 2
3049    y_offset = nrows_stencil / 2
3050    cellsize_ratio = int(cellsize_new / cellsize)
3051    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3052    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3053
3054    #Open netcdf file for output
3055    outfile = NetCDFFile(outname, 'w')
3056
3057    #Create new file
3058    outfile.institution = 'Geoscience Australia'
3059    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3060                           'of spatial point data'
3061    #Georeferencing
3062    outfile.zone = zone
3063    outfile.projection = projection
3064    outfile.datum = datum
3065    outfile.units = units
3066
3067    outfile.cellsize = cellsize_new
3068    outfile.NODATA_value = NODATA_value
3069    outfile.false_easting = false_easting
3070    outfile.false_northing = false_northing
3071
3072    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3073    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3074    outfile.ncols = ncols_new
3075    outfile.nrows = nrows_new
3076
3077    # dimension definition
3078    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3079
3080    # variable definition
3081    outfile.createVariable('elevation', Float, ('number_of_points',))
3082
3083    # Get handle to the variable
3084    elevation = outfile.variables['elevation']
3085
3086    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3087
3088    #Store data
3089    global_index = 0
3090    for i in range(nrows_new):
3091        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3092        lower_index = global_index
3093        telev =  zeros(ncols_new, Float)
3094        local_index = 0
3095        trow = i * cellsize_ratio
3096
3097        for j in range(ncols_new):
3098            tcol = j * cellsize_ratio
3099            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3100
3101            #if dem contains 1 or more NODATA_values set value in
3102            #decimated dem to NODATA_value, else compute decimated
3103            #value using stencil
3104            if sum(sum(equal(tmp, NODATA_value))) > 0:
3105                telev[local_index] = NODATA_value
3106            else:
3107                telev[local_index] = sum(sum(tmp * stencil))
3108
3109            global_index += 1
3110            local_index += 1
3111
3112        upper_index = global_index
3113
3114        elevation[lower_index:upper_index] = telev
3115
3116    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3117
3118    infile.close()
3119    outfile.close()
3120
3121
3122
3123
3124def tsh2sww(filename, verbose=False): #test_tsh2sww
3125    """
3126    to check if a tsh/msh file 'looks' good.
3127    """
3128
3129    #FIXME Move to data_manager
3130    from shallow_water import Domain
3131    from pmesh2domain import pmesh_to_domain_instance
3132    import time, os
3133    from data_manager import get_dataobject
3134    from os import sep, path
3135    from utilities.numerical_tools import mean
3136
    if verbose: print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose: print "Number of triangles = ", len(domain)
3140
3141    domain.smooth = True
3142    domain.format = 'sww'   #Native netcdf visualisation format
3143    file_path, filename = path.split(filename)
3144    filename, ext = path.splitext(filename)
3145    domain.filename = filename
3146    domain.reduction = mean
3147    if verbose == True:print "file_path",file_path
3148    if file_path == "":file_path = "."
3149    domain.set_datadir(file_path)
3150
    if verbose:
3152        print "Output written to " + domain.get_datadir() + sep + \
3153              domain.filename + "." + domain.format
3154    sww = get_dataobject(domain)
3155    sww.store_connectivity()
3156    sww.store_timestep('stage')
3157
3158
3159def asc_csiro2sww(bath_dir,
3160                  elevation_dir,
3161                  ucur_dir,
3162                  vcur_dir,
3163                  sww_file,
3164                  minlat = None, maxlat = None,
3165                  minlon = None, maxlon = None,
3166                  zscale=1,
3167                  mean_stage = 0,
3168                  fail_on_NaN = True,
3169                  elevation_NaN_filler = 0,
3170                  bath_prefix='ba',
3171                  elevation_prefix='el',
3172                  verbose=False):
3173    """
3174    Produce an sww boundary file, from esri ascii data from CSIRO.
3175
3176    Also convert latitude and longitude to UTM. All coordinates are
3177    assumed to be given in the GDA94 datum.
3178
3179    assume:
3180    All files are in esri ascii format
3181
3182    4 types of information
3183    bathymetry
3184    elevation
3185    u velocity
3186    v velocity
3187
3188    Assumptions
3189    The metadata of all the files is the same
    Each type is in a separate directory
    One bath file with extension .000
3192    The time period is less than 24hrs and uniform.
3193    """
3194    from Scientific.IO.NetCDF import NetCDFFile
3195
3196    from coordinate_transforms.redfearn import redfearn
3197
3198    precision = Float # So if we want to change the precision its done here
3199
3200    # go in to the bath dir and load the only file,
3201    bath_files = os.listdir(bath_dir)
3202    #print "bath_files",bath_files
3203
3204    #fixme: make more general?
3205    bath_file = bath_files[0]
3206    bath_dir_file =  bath_dir + os.sep + bath_file
3207    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3208    #print "bath_metadata",bath_metadata
3209    #print "bath_grid",bath_grid
3210
3211    #Use the date.time of the bath file as a basis for
3212    #the start time for other files
3213    base_start = bath_file[-12:]
3214
3215    #go into the elevation dir and load the 000 file
3216    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3217                         + base_start
3218    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
3219    #print "elevation_dir_file",elevation_dir_file
3220    #print "elevation_grid", elevation_grid
3221
3222    elevation_files = os.listdir(elevation_dir)
3223    ucur_files = os.listdir(ucur_dir)
3224    vcur_files = os.listdir(vcur_dir)
3225
3226    # the first elevation file should be the
3227    # file with the same base name as the bath data
3228    #print "elevation_files[0]",elevation_files[0]
3229    #print "'el' + base_start",'el' + base_start
    assert elevation_files[0] == elevation_prefix + base_start
3231
3232    #assert bath_metadata == elevation_metadata
3233
3234
3235
3236    number_of_latitudes = bath_grid.shape[0]
3237    number_of_longitudes = bath_grid.shape[1]
3238    #number_of_times = len(os.listdir(elevation_dir))
3239    #number_of_points = number_of_latitudes*number_of_longitudes
3240    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3241
3242    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3243                  for x in range(number_of_longitudes)]
3244    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3245                 for y in range(number_of_latitudes)]
3246
    # Reverse order of lats, so the first lat represents the first grid row
3248    latitudes.reverse()
3249
3250    #print "latitudes - before _get_min_max_indexes",latitudes
3251    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
3252                                                 minlat=minlat, maxlat=maxlat,
3253                                                 minlon=minlon, maxlon=maxlon)
3254
3255
3256    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3257    #print "bath_grid",bath_grid
3258    latitudes = latitudes[kmin:kmax]
3259    longitudes = longitudes[lmin:lmax]
3260    number_of_latitudes = len(latitudes)
3261    number_of_longitudes = len(longitudes)
3262    number_of_times = len(os.listdir(elevation_dir))
3263    number_of_points = number_of_latitudes*number_of_longitudes
3264    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3265    #print "number_of_points",number_of_points
3266
3267    #Work out the times
3268    if len(elevation_files) > 1:
3269        # Assume: The time period is less than 24hrs.
3270        time_period = (int(elevation_files[1][-3:]) - \
3271                      int(elevation_files[0][-3:]))*60*60
3272        times = [x*time_period for x in range(len(elevation_files))]
3273    else:
3274        times = [0.0]
3275    #print "times", times
3276    #print "number_of_latitudes", number_of_latitudes
3277    #print "number_of_longitudes", number_of_longitudes
3278    #print "number_of_times", number_of_times
3279    #print "latitudes", latitudes
3280    #print "longitudes", longitudes
3281
3282
3283    if verbose:
3284        print '------------------------------------------------'
3285        print 'Statistics:'
3286        print '  Extent (lat/lon):'
3287        print '    lat in [%f, %f], len(lat) == %d'\
3288              %(min(latitudes), max(latitudes),
3289                len(latitudes))
3290        print '    lon in [%f, %f], len(lon) == %d'\
3291              %(min(longitudes), max(longitudes),
3292                len(longitudes))
3293        print '    t in [%f, %f], len(t) == %d'\
3294              %(min(times), max(times), len(times))
3295
3296    ######### WRITE THE SWW FILE #############
3297    # NetCDF file definition
3298    outfile = NetCDFFile(sww_file, 'w')
3299
3300    #Create new file
3301    outfile.institution = 'Geoscience Australia'
3302    outfile.description = 'Converted from XXX'
3303
3304
3305    #For sww compatibility
3306    outfile.smoothing = 'Yes'
3307    outfile.order = 1
3308
3309    #Start time in seconds since the epoch (midnight 1/1/1970)
3310    outfile.starttime = starttime = times[0]
3311
3312
3313    # dimension definitions
3314    outfile.createDimension('number_of_volumes', number_of_volumes)
3315
3316    outfile.createDimension('number_of_vertices', 3)
3317    outfile.createDimension('number_of_points', number_of_points)
3318    outfile.createDimension('number_of_timesteps', number_of_times)
3319
3320    # variable definitions
3321    outfile.createVariable('x', precision, ('number_of_points',))
3322    outfile.createVariable('y', precision, ('number_of_points',))
3323    outfile.createVariable('elevation', precision, ('number_of_points',))
3324
3325    #FIXME: Backwards compatibility
3326    outfile.createVariable('z', precision, ('number_of_points',))
3327    #################################
3328
3329    outfile.createVariable('volumes', Int, ('number_of_volumes',
3330                                            'number_of_vertices'))
3331
3332    outfile.createVariable('time', precision,
3333                           ('number_of_timesteps',))
3334
3335    outfile.createVariable('stage', precision,
3336                           ('number_of_timesteps',
3337                            'number_of_points'))
3338
3339    outfile.createVariable('xmomentum', precision,
3340                           ('number_of_timesteps',
3341                            'number_of_points'))
3342
3343    outfile.createVariable('ymomentum', precision,
3344                           ('number_of_timesteps',
3345                            'number_of_points'))

    #Store
    from coordinate_transforms.redfearn import redfearn
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print 'Making triangular grid'
    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])
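    # redfearn() maps (lat, lon) onto a UTM (zone, easting, northing)
    # triple; e.g. lon = 151.2 (near Sydney) falls in zone 56.  All grid
    # points are expected to share the zone of this first point (refzone).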

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat, lon)

            msg = 'Zone boundary crossed at longitude = %f' %lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1


    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            #Note, this is different to the ferret2sww code
            #since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) #Upper element
            volumes.append([v4,v2,v3]) #Lower element

    volumes = array(volumes)
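
    # One grid cell and its two triangles (k is the latitude index,
    # l the longitude index, as above):
    #
    #   v1 ------ v3        upper element: [v1, v3, v2]
    #    |      / |
    #    |    /   |
    #    |  /     |
    #   v2 ------ v4        lower element: [v4, v2, v3]
    #
    # The shared diagonal runs from v2 (lower left) to v3 (upper right).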

    geo_ref = Geo_reference(refzone, min(x), min(y))
    geo_ref.write_NetCDF(outfile)

    # This would put the geo ref in the middle instead:
    #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2.0, (max(y)+min(y))/2.0)


    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (UTM coordinates):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x), len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y), len(y))
        print 'geo_ref: ', geo_ref

    z = resize(bath_grid, outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #Int is 64 bit on Opteron

    # To store just a single, static timestep instead, do this:
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z
    # ...and set number_of_times = 1 just after outfile.order = 1 above.

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
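
    # Each ASC grid is converted to the conserved quantities per point:
    #   stage:     w = zscale*elevation + mean_stage   [m]
    #   depth:     h = w - z                           [m]
    #   momenta:   xmomentum = u*h, ymomentum = v*h    [m^2/s]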
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
           _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])

        #print "elevation_grid",elevation_grid
        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
        #print "elevation_grid",elevation_grid
        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue(missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                raise DataMissingValuesError, msg
            else:
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler
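                # e.g. with NODATA_value == -9999 and a filler of 0.0:
                #   [1.2, -9999.0, 3.4]  ->  [1.2, 0.0, 3.4]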

        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()

def _get_min_max_indexes(latitudes, longitudes,
                         minlat=None, maxlat=None,
                         minlon=None, maxlon=None):
    """
    Return min and max indexes (for slicing) of the lat and lon arrays
    so that the slices cover the area specified by min/max lat/lon.

    Think of the latitudes and longitudes as describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assume latitudes and longitudes are sorted:
    lon - from low to high (west to east, e.g. 148 to 151)
    lat - from high to low (north to south, e.g. -35 to -38)
    """

    # reverse order of lat, so it's in ascending order
    latitudes.reverse()
    largest_lat_index = len(latitudes)-1

    #Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0

    if maxlat is None:
        lat_max_index = largest_lat_index
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)

    #Take into account that the latitude list was reversed
    latitudes.reverse() # the reverse() above was in place; restore order
    lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
                                   largest_lat_index - lat_min_index
    lat_max_index = lat_max_index + 1 # slice upper bounds are exclusive
    lon_max_index = lon_max_index + 1 # slice upper bounds are exclusive

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
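
# A minimal usage sketch (values are illustrative only):
#
#   lats = [-34.5, -35.0, -35.5, -36.0]   # descending, north to south
#   lons = [148.0, 148.5, 149.0]          # ascending, west to east
#   kmin, kmax, lmin, lmax = _get_min_max_indexes(lats, lons,
#                                                 minlat=-35.6, maxlat=-34.9,
#                                                 minlon=148.2, maxlon=149.0)
#   subgrid = grid[kmin:kmax, lmin:lmax]  # just covers the requested area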


def _read_asc(filename, verbose=False):
    """Read an ESRI ASCII grid file (.asc) of the following format:

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    """

    datafile = open(filename)

    if verbose: print 'Reading DEM from %s' %(filename)
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), 'lines'

    ncols = int(lines.pop(0).split()[1].strip())
    nrows = int(lines.pop(0).split()[1].strip())
    xllcorner = float(lines.pop(0).split()[1].strip())
    yllcorner = float(lines.pop(0).split()[1].strip())
    cellsize = float(lines.pop(0).split()[1].strip())
    NODATA_value = float(lines.pop(0).split()[1].strip())

    assert len(lines) == nrows

    #Store data
    grid = []

    for line in lines:
        cells = line.split()
        assert len(cells) == ncols
        grid.append(array([float(x) for x in cells]))
    grid = array(grid)

    return {'xllcorner':xllcorner,
            'yllcorner':yllcorner,
            'cellsize':cellsize,
            'NODATA_value':NODATA_value}, grid
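
# A minimal usage sketch (the filename is hypothetical):
#
#   meta, grid = _read_asc('bathymetry.asc')
#   print meta['cellsize']   # e.g. 25.0
#   print grid.shape         # (nrows, ncols), e.g. (1800, 3121)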

def sww2timeseries(swwfile,
                   gauge_filename,
                   gauge_data_outname,
                   quantity = None,
                   time_min = None,
                   time_max = None,
                   verbose = False):

    """Read SWW file and extract time series for prescribed quantities at
    gauge locations.

    The gauge locations are defined in gauge_filename. This file should be
    in the form: gaugename, easting, northing, and should be stored in a
    .csv or .xya file.

    Time series data at the gauges can be written to file (for example,
    Benfield requested raw data), with the default being no data written.

    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is 'depth'.

    The user can define a list of quantities. The possibilities are
    the conserved quantities of the shallow water wave equation and other
    quantities which can be derived from those, i.e.
    ['depth', 'xmomentum', 'ymomentum', 'momentum', 'velocity', 'bearing'].

    Momentum is the absolute momentum, sqrt(xmomentum^2 + ymomentum^2).
    Note, units of momentum are m^2/s and depth is m.

    Velocity is absolute momentum divided by depth. (Confirming correct
    units: vel = abs mom / depth = (m^2/s)/m = m/s.)

    Bearing returns the angle of the velocity vector from North.

    If time_min and time_max are given, output is plotted in that time
    range. The default is to plot the entire time range found in the
    sww file.

    The export graphic format is 'png' and graphics will be stored in
    the same directory as the input file.
    """

    if quantity is None: quantity = 'depth'

    # extract gauge locations from gauge file

    # extract all quantities from swwfile (f = file_function)
    # FIXME: f is not defined yet; the two lines below will raise a
    # NameError until the extraction step above is implemented.
    if time_min is None: time_min = min(f.T)
    if time_max is None: time_max = max(f.T)

    # loop through appropriate range of time
    # plot prescribed quantities and export data if requested

    #if gauge_data_outname is None:

    return
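
# A rough sketch of the intended flow for sww2timeseries (treat this as
# pseudocode: the exact file_function signature in this revision is an
# assumption, as are the helper names):
#
#   from util import file_function
#   f = file_function(swwfile,
#                     quantities=['stage', 'elevation',
#                                 'xmomentum', 'ymomentum'],
#                     interpolation_points=gauge_locations)
#   for t in f.T:
#       if time_min <= t <= time_max:
#           for i, (name, easting, northing) in enumerate(gauges):
#               values = f(t, point_id=i)  # quantities at gauge i, time t
#               # derive depth, momentum, velocity, bearing as documented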