source: inundation/pyvolution/data_manager.py @ 2644

Last change on this file since 2644 was 2644, checked in by nick, 18 years ago

added functionality to data_manager.py can now read MOST and Ferret netCDF files

File size: 111.3 KB
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using pyvolution
48
49"""
import os

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around


from coordinate_transforms.geo_reference import Geo_reference


def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s

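# Example (behaviour of make_filename as defined above):
#     make_filename(' Model output (test) ')  ->  'Model_output_test'
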

def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:' + os.sep

            print 'Using directory %s instead' %path

    return path



def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN

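# Example (create_filename as defined above, on a unix platform, assuming
# the directory 'data' exists and is writable):
#     create_filename('data', 'run', 'sww', size=4, time=1.5)
#     ->  'data/run_size4_time1.50.sww'
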

def get_files(datadir, filename, format, size):
    """Get all file names with given name, size and format
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)

    #Note: check_dir returns path with trailing separator and
    #create_filename writes the suffix as '_size%d'
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
                                        '''   'w' (write)'''+\
                                        '''   'r' (read)''' +\
                                        '''   'a' (append)'''

        #Create filename
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))


        self.filename = create_filename(domain.get_datadir(),
                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain


        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        #minimum_allowed_depth = 0.001
        minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase =  file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #I change the domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()


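
# A minimal sketch (not part of the original module) of reading back a file
# written by Data_format_sww, using the same Scientific.IO.NetCDF API as
# above. The filename 'model.sww' is hypothetical.
#
#     from Scientific.IO.NetCDF import NetCDFFile
#     fid = NetCDFFile('model.sww', 'r')
#     x = fid.variables['x'][:]              #Vertex eastings (relative)
#     volumes = fid.variables['volumes'][:]  #Triangle connectivity
#     stage = fid.variables['stage'][:]      #One row per stored timestep
#     fid.close()
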

#Class for handling checkpoint data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity of the triangles
        constituting the mesh.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #Note: the cpt file defines no elevation variable, so Z is not stored

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                          %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        stage = fid.variables[name]  #Look up the named quantity
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision = self.precision)

        stage[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()




#Function for storing xya output
#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface to ASCII format (.xya) for storing model output
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file
        """
        pass


#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()

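# A minimal usage sketch for write_obj. The arrays are hypothetical; each
# row holds the three vertices of one disconnected triangle, as assumed in
# the docstring above.
#
#     from Numeric import array, Float
#     x = array([[0.0, 1.0, 0.0]], Float)
#     y = array([[0.0, 0.0, 1.0]], Float)
#     z = array([[0.0, 0.0, 0.0]], Float)
#     write_obj('single_triangle', x, y, z)   #Writes single_triangle.obj
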

#########################################################
#Conversion routines
#########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info
    #and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        #Note: glob returns the full path, so don't prepend data_dir again
        lines = open(filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise 'Could not extract time from filename %s' %filename



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]


        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

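# Example (filter_netcdf as defined above): keep every second timestep of
# the first ten. The filenames 'full.sww' and 'thin.sww' are hypothetical.
#
#     filter_netcdf('full.sww', 'thin.sww', first=0, last=10, step=2)
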

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get ASCII xya file
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)

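# Example (xya2pts as defined above; the basename 'bathymetry' is
# hypothetical):
#
#     xya2pts('bathymetry')                       #bathymetry.xya -> bathymetry.pts
#     xya2pts('bathymetry', z_func=lambda z: -z)  #e.g. convert depths to elevations
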


######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False,):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

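# Example (dem2pts as defined above): clip to a bounding box while
# converting. The basename and extents are hypothetical.
#
#     dem2pts('topography',
#             easting_min=722000, easting_max=725000,
#             northing_min=5893000, northing_max=5895000,
#             use_cache=True, verbose=True)
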

def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose and nn > 0:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])




def hecras_cross_sections2pts(basename_in,
                              basename_out=None,
                              verbose=False):
    """Read HEC-RAS elevation data from the following ASCII format (.sdf)

    Example:


# RAS export file created on Mon 15Aug2005 11:42
# by HEC-RAS Version 3.1.1

BEGIN HEADER:
  UNITS: METRIC
  DTM TYPE: TIN
  DTM: v:\1\cit\perth_topo\river_tin
  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
  MAP PROJECTION: UTM
  PROJECTION ZONE: 50
  DATUM: AGD66
  VERTICAL DATUM:
  NUMBER OF REACHES:  19
  NUMBER OF CROSS-SECTIONS:  14206
END HEADER:


Only the SURFACE LINE data of the following form will be utilised

  CROSS-SECTION:
    STREAM ID:Southern-Wungong
    REACH ID:Southern-Wungong
    STATION:19040.*
    CUT LINE:
      405548.671603161 , 6438142.7594925
      405734.536092045 , 6438326.10404912
      405745.130459356 , 6438331.48627354
      405813.89633823 , 6438368.6272789
    SURFACE LINE:
     405548.67,   6438142.76,   35.37
     405552.24,   6438146.28,   35.41
     405554.78,   6438148.78,   35.44
     405555.80,   6438149.79,   35.44
     405559.37,   6438153.31,   35.45
     405560.88,   6438154.81,   35.44
     405562.93,   6438156.83,   35.42
     405566.50,   6438160.35,   35.38
     405566.99,   6438160.83,   35.37
     ...
   END CROSS-SECTION

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get ASCII file
    infile = open(root + '.sdf', 'r')  #Open SDF file for read

    if verbose: print 'Reading HEC-RAS cross sections from %s' %(root + '.sdf')

    lines = infile.readlines()
    infile.close()

    if verbose: print 'Converting to pts format'

    i = 0
    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
        i += 1

    assert lines[i].strip().upper() == 'BEGIN HEADER:'
    i += 1

    assert lines[i].strip().upper().startswith('UNITS:')
    units = lines[i].strip().split()[1]
    i += 1

    assert lines[i].strip().upper().startswith('DTM TYPE:')
    i += 1

    assert lines[i].strip().upper().startswith('DTM:')
    i += 1

    assert lines[i].strip().upper().startswith('STREAM')
    i += 1

    assert lines[i].strip().upper().startswith('CROSS')
    i += 1

    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
    projection = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
    zone = int(lines[i].strip().split(':')[1])
    i += 1

    assert lines[i].strip().upper().startswith('DATUM:')
    datum = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
    number_of_cross_sections = int(lines[i].strip().split(':')[1])
    i += 1


    #Now read all points
    points = []
    elevation = []
    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
        for k, entry in enumerate(entries):
            points.append(entry[:2])
            elevation.append(entry[2])


    msg = 'Actual #number_of_cross_sections == %d, Reported as %d'\
          %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg

    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    #FIXME (DSG-ON): use loadASCII export_points_file
    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable ' +\
                          'storage of spatial point data derived from HEC-RAS'

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = 0.0
    outfile.yllcorner = 0.0
    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
    outfile.false_northing = 1000000

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    outfile.variables['points'][:] = points
    outfile.variables['elevation'][:] = elevation

    outfile.close()

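# Example (hecras_cross_sections2pts as defined above; the basename
# 'river_sections' is hypothetical):
#
#     hecras_cross_sections2pts('river_sections', verbose=True)
#     #Reads river_sections.sdf and writes river_sections.pts
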


def sww2dem(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            NODATA_value = -9999,
            easting_min = None,
            easting_max = None,
            northing_min = None,
            northing_max = None,
            expand_search = False, #To avoid intractable situations (This will be fixed when least_squares gets redesigned)
            verbose = False,
            origin = None,
            datum = 'WGS84',
            format = 'ers'):

    """Read SWW file and convert to Digital Elevation model format (.asc or .ers)

    Example (ASC):

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'elevation'.

    If timestep (an index) is given, output quantity at that timestep.

    If reduction is given, use that to reduce quantity over all timesteps.

    datum is written to the prj/ers header (default 'WGS84').

    format can be either 'asc' or 'ers'.
    """

    import sys
    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
    from Numeric import array2string

    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
    from util import apply_expression_to_dictionary

    msg = 'Format must be either asc or ers'
    assert format.lower() in ['asc', 'ers'], msg


    false_easting = 500000
    false_northing = 10000000

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    demfile = basename_out + '.' + format
    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option)

    #Read sww file
    if verbose: print 'Reading from %s' %swwfile
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]



    #FIXME: Refactor using code from file_function.statistics
    #Something like print swwstats(swwname)
    if verbose:
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        times = fid.variables['time'][:]
        print '------------------------------------------------'
        print 'Statistics of SWW file:'
        print '  Name: %s' %swwfile
        print '  Reference:'
        print '    Lower left corner: [%f, %f]'\
              %(xllcorner, yllcorner)
        print '    Start time: %f' %fid.starttime[0]
        print '  Extent:'
        print '    x [m] in [%f, %f], len(x) == %d'\
              %(min(x.flat), max(x.flat), len(x.flat))
        print '    y [m] in [%f, %f], len(y) == %d'\
              %(min(y.flat), max(y.flat), len(y.flat))
        print '    t [s] in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))
        print '  Quantities [SI units]:'
        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
            q = fid.variables[name][:].flat
            print '    %s in [%f, %f]' %(name, min(q), max(q))





    #Get quantity and reduce if applicable
    if verbose: print 'Processing quantity %s' %quantity

    #Turn NetCDF objects into Numeric arrays
    quantity_dict = {}
    for name in fid.variables.keys():
        quantity_dict[name] = fid.variables[name][:]


    q = apply_expression_to_dictionary(quantity, quantity_dict)



    if len(q.shape) == 2:
        #q has a time component and needs to be reduced along
        #the temporal dimension
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    #Post condition: Now q has dimension: number_of_points
    assert len(q.shape) == 1
    assert q.shape[0] == number_of_points


    if verbose:
        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))


    #Create grid and update xll/yll corner and x,y

    #Relative extent
    if easting_min is None:
        xmin = min(x)
    else:
        xmin = easting_min - xllcorner

    if easting_max is None:
        xmax = max(x)
    else:
        xmax = easting_max - xllcorner

    if northing_min is None:
        ymin = min(y)
    else:
        ymin = northing_min - yllcorner

    if northing_max is None:
        ymax = max(y)
    else:
        ymax = northing_max - yllcorner



    if verbose: print 'Creating grid'
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1


    #New absolute reference and coordinates
    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2



    grid_points = zeros ( (ncols*nrows, 2), Float )


    for i in xrange(nrows):
        if format.lower() == 'asc':
            yg = i*cellsize
        else:
            #this will flip the order of the y values for ers
            yg = (nrows-i)*cellsize

        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

            grid_points[k,0] = xg
            grid_points[k,1] = yg

    #Interpolate
    from least_squares import Interpolation


    #FIXME: This should be done with precrop = True (?), otherwise it'll
    #take forever. With expand_search set to False, some grid points might
    #miss out.... This will be addressed through Duncan's refactoring of least_squares
1756
1757    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
1758                           precrop = False,
1759                           expand_search = expand_search,
1760                           verbose = verbose)
1761
1762
1763
1764    #Interpolate using quantity values
1765    if verbose: print 'Interpolating'
1766    grid_values = interp.interpolate(q).flat
1767
1768
1769    if verbose:
1770        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
1771                                                      max(grid_values))
1772
1773    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
1774    P = interp.mesh.get_boundary_polygon()
1775    outside_indices = outside_polygon(grid_points, P, closed=True)
1776
1777    for i in outside_indices:
1778        grid_values[i] = NODATA_value
1779
1780
1781
1782
1783    if format.lower() == 'ers':
1784        # setup ERS header information
1785        grid_values = reshape(grid_values,(nrows, ncols))
1786        header = {}
1787        header['datum'] = '"' + datum + '"'
1788        # FIXME The use of hardwired UTM and zone number needs to be made optional
1789        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
1790        header['projection'] = '"UTM-' + str(zone) + '"'
1791        header['coordinatetype'] = 'EN'
1792        if header['coordinatetype'] == 'LL':
1793            header['longitude'] = str(newxllcorner)
1794            header['latitude'] = str(newyllcorner)
1795        elif header['coordinatetype'] == 'EN':
1796            header['eastings'] = str(newxllcorner)
1797            header['northings'] = str(newyllcorner)
1798        header['nullcellvalue'] = str(NODATA_value)
1799        header['xdimension'] = str(cellsize)
1800        header['ydimension'] = str(cellsize)
1801        header['value'] = '"' + quantity + '"'
1802        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
1803
1804
1805        #Write
1806        if verbose: print 'Writing %s' %demfile
1807        import ermapper_grids
1808        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
1809
1810        fid.close()
1811    else:
1812        #Write to Ascii format
1813
1814        #Write prj file
1815        prjfile = basename_out + '.prj'
1816
1817        if verbose: print 'Writing %s' %prjfile
1818        prjid = open(prjfile, 'w')
1819        prjid.write('Projection    %s\n' %'UTM')
1820        prjid.write('Zone          %d\n' %zone)
1821        prjid.write('Datum         %s\n' %datum)
1822        prjid.write('Zunits        NO\n')
1823        prjid.write('Units         METERS\n')
1824        prjid.write('Spheroid      %s\n' %datum)
1825        prjid.write('Xshift        %d\n' %false_easting)
1826        prjid.write('Yshift        %d\n' %false_northing)
1827        prjid.write('Parameters\n')
1828        prjid.close()
1829
1830
1831
1832        if verbose: print 'Writing %s' %demfile
1833
1834        ascid = open(demfile, 'w')
1835
1836        ascid.write('ncols         %d\n' %ncols)
1837        ascid.write('nrows         %d\n' %nrows)
1838        ascid.write('xllcorner     %d\n' %newxllcorner)
1839        ascid.write('yllcorner     %d\n' %newyllcorner)
1840        ascid.write('cellsize      %f\n' %cellsize)
1841        ascid.write('NODATA_value  %d\n' %NODATA_value)
1842
1843
1844        #Get bounding polygon from mesh
1845        #P = interp.mesh.get_boundary_polygon()
1846        #inside_indices = inside_polygon(grid_points, P)
1847
1848        for i in range(nrows):
1849            if verbose and i%((nrows+10)/10)==0:
1850                print 'Doing row %d of %d' %(i, nrows)
1851
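            # .asc rows are written north to south, while grid_values was
            # assembled south to north (yg = i*cellsize), hence the
            # reversed row index below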
1852            base_index = (nrows-i-1)*ncols
1853
1854            slice = grid_values[base_index:base_index+ncols]
1855            s = array2string(slice, max_line_width=sys.maxint)
1856            ascid.write(s[1:-1] + '\n')
1857
1858
1859            #print
1860            #for j in range(ncols):
1861            #    index = base_index+j#
1862            #    print grid_values[index],
1863            #    ascid.write('%f ' %grid_values[index])
1864            #ascid.write('\n')
1865
1866        #Close
1867        ascid.close()
1868        fid.close()
1869
1870#Backwards compatibility
1871def sww2asc(basename_in, basename_out = None,
1872            quantity = None,
1873            timestep = None,
1874            reduction = None,
1875            cellsize = 10,
1876            verbose = False,
1877            origin = None):
1878    print 'sww2asc will soon be obsoleted - please use sww2dem'
1879    sww2dem(basename_in,
1880            basename_out = basename_out,
1881            quantity = quantity,
1882            timestep = timestep,
1883            reduction = reduction,
1884            cellsize = cellsize,
1885            verbose = verbose,
1886            origin = origin,
1887            datum = 'WGS84',
1888            format = 'asc')
1889
1890def sww2ers(basename_in, basename_out = None,
1891            quantity = None,
1892            timestep = None,
1893            reduction = None,
1894            cellsize = 10,
1895            verbose = False,
1896            origin = None,
1897            datum = 'WGS84'):
1898    print 'sww2ers will soon be obsoleted - please use sww2dem'
1899    sww2dem(basename_in,
1900            basename_out = basename_out,
1901            quantity = quantity,
1902            timestep = timestep,
1903            reduction = reduction,
1904            cellsize = cellsize,
1905            verbose = verbose,
1906            origin = origin,
1907            datum = datum,
1908            format = 'ers')
1909################################# END COMPATIBILITY ##############
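
# Example (a sketch, with a hypothetical basename 'runup'): the two wrappers
# above are equivalent to calling sww2dem directly, e.g.
#
#     sww2dem('runup', basename_out='runup_depth',
#             quantity='stage - elevation',  # a name or expression over stored quantities
#             reduction=max, cellsize=10,
#             datum='WGS84', format='asc')
#
# reduction may be any callable that collapses the time axis (e.g. max or min).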
1910
1911
1912
1913def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1914                                  use_cache = False,
1915                                  verbose = False):
1916    """Read Digitial Elevation model from the following ASCII format (.asc)   
1917
1918    Example:
1919
1920    ncols         3121
1921    nrows         1800
1922    xllcorner     722000
1923    yllcorner     5893000
1924    cellsize      25
1925    NODATA_value  -9999
1926    138.3698 137.4194 136.5062 135.5558 ..........
1927
1928    Convert basename_in + '.asc' to NetCDF format (.dem)
1929    mimicking the ASCII format closely.
1930
1931
    An accompanying file with the same basename_in but extension .prj must exist
    and is used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as follows:
1936
1937    Projection    UTM
1938    Zone          56
1939    Datum         WGS84
1940    Zunits        NO
1941    Units         METERS
1942    Spheroid      WGS84
1943    Xshift        0.0000000000
1944    Yshift        10000000.0000000000
1945    Parameters
1946    """
1947
1948
1949
1950    kwargs = {'basename_out': basename_out, 'verbose': verbose}
1951
1952    if use_cache is True:
1953        from caching import cache
1954        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
1955                       dependencies = [basename_in + '.asc'],
1956                       verbose = verbose)
1957
1958    else:
1959        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
1960       
1961    return result
1962
1963   
1964   
1965
1966
1967
1968def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1969                                  verbose = False):
1970    """Read Digitial Elevation model from the following ASCII format (.asc)
1971
1972    Internal function. See public function convert_dem_from_ascii2netcdf for details.
1973    """
1974
1975    import os
1976    from Scientific.IO.NetCDF import NetCDFFile
1977    from Numeric import Float, array
1978
1979    #root, ext = os.path.splitext(basename_in)
1980    root = basename_in
1981
1982    ###########################################
1983    # Read Meta data
1984    if verbose: print 'Reading METADATA from %s' %root + '.prj'
1985    metadatafile = open(root + '.prj')
1986    metalines = metadatafile.readlines()
1987    metadatafile.close()
1988
1989    L = metalines[0].strip().split()
1990    assert L[0].strip().lower() == 'projection'
1991    projection = L[1].strip()                   #TEXT
1992
1993    L = metalines[1].strip().split()
1994    assert L[0].strip().lower() == 'zone'
1995    zone = int(L[1].strip())
1996
1997    L = metalines[2].strip().split()
1998    assert L[0].strip().lower() == 'datum'
1999    datum = L[1].strip()                        #TEXT
2000
2001    L = metalines[3].strip().split()
2002    assert L[0].strip().lower() == 'zunits'     #IGNORE
2003    zunits = L[1].strip()                       #TEXT
2004
2005    L = metalines[4].strip().split()
2006    assert L[0].strip().lower() == 'units'
2007    units = L[1].strip()                        #TEXT
2008
2009    L = metalines[5].strip().split()
2010    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2011    spheroid = L[1].strip()                     #TEXT
2012
2013    L = metalines[6].strip().split()
2014    assert L[0].strip().lower() == 'xshift'
2015    false_easting = float(L[1].strip())
2016
2017    L = metalines[7].strip().split()
2018    assert L[0].strip().lower() == 'yshift'
2019    false_northing = float(L[1].strip())
2020
2021    #print false_easting, false_northing, zone, datum
2022
2023
2024    ###########################################
2025    #Read DEM data
2026
2027    datafile = open(basename_in + '.asc')
2028
2029    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2030    lines = datafile.readlines()
2031    datafile.close()
2032
2033    if verbose: print 'Got', len(lines), ' lines'
2034
2035    ncols = int(lines[0].split()[1].strip())
2036    nrows = int(lines[1].split()[1].strip())
2037    xllcorner = float(lines[2].split()[1].strip())
2038    yllcorner = float(lines[3].split()[1].strip())
2039    cellsize = float(lines[4].split()[1].strip())
2040    NODATA_value = int(lines[5].split()[1].strip())
2041
2042    assert len(lines) == nrows + 6
2043
2044
2045    ##########################################
2046
2047
2048    if basename_out == None:
2049        netcdfname = root + '.dem'
2050    else:
2051        netcdfname = basename_out + '.dem'
2052
2053    if verbose: print 'Store to NetCDF file %s' %netcdfname
2054    # NetCDF file definition
2055    fid = NetCDFFile(netcdfname, 'w')
2056
2057    #Create new file
2058    fid.institution = 'Geoscience Australia'
2059    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2060                      'of spatial point data'
2061
2062    fid.ncols = ncols
2063    fid.nrows = nrows
2064    fid.xllcorner = xllcorner
2065    fid.yllcorner = yllcorner
2066    fid.cellsize = cellsize
2067    fid.NODATA_value = NODATA_value
2068
2069    fid.zone = zone
2070    fid.false_easting = false_easting
2071    fid.false_northing = false_northing
2072    fid.projection = projection
2073    fid.datum = datum
2074    fid.units = units
2075
2076
2077    # dimension definitions
2078    fid.createDimension('number_of_rows', nrows)
2079    fid.createDimension('number_of_columns', ncols)
2080
2081    # variable definitions
2082    fid.createVariable('elevation', Float, ('number_of_rows',
2083                                            'number_of_columns'))
2084
2085    # Get handles to the variables
2086    elevation = fid.variables['elevation']
2087
2088    #Store data
2089    n = len(lines[6:])
2090    for i, line in enumerate(lines[6:]):
2091        fields = line.split()
2092        if verbose and i%((n+10)/10)==0:
2093            print 'Processing row %d of %d' %(i, nrows)
2094
2095        elevation[i, :] = array([float(x) for x in fields])
2096
2097    fid.close()
2098
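# Example (a sketch): converting a hypothetical ArcView ASCII DEM 'topo.asc'
# (with its accompanying 'topo.prj') to NetCDF, cached so the slow parse is
# only redone when topo.asc changes:
#
#     convert_dem_from_ascii2netcdf('topo', use_cache=True, verbose=True)
#     # -> writes topo.dem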
2099
2100
2101
2102
2103def ferret2sww(basename_in, basename_out = None,
2104               verbose = False,
2105               minlat = None, maxlat = None,
2106               minlon = None, maxlon = None,
2107               mint = None, maxt = None, mean_stage = 0,
2108               origin = None, zscale = 1,
2109               fail_on_NaN = True,
2110               NaN_filler = 0,
2111               elevation = None,
2112               inverted_bathymetry = False
               ):
    #FIXME: Bathymetry should be obtained from MOST somehow,
    #alternatively from elsewhere or, as a last resort, specified here.
    #The value of -100 will work for the Wollongong tsunami scenario
    #but is very hacky.
2121    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2122    sww format native to pyvolution.
2123
2124    Specify only basename_in and read files of the form
2125    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2126    relative height, x-velocity and y-velocity, respectively.
2127
2128    Also convert latitude and longitude to UTM. All coordinates are
2129    assumed to be given in the GDA94 datum.
2130
    Mins and maxs: if omitted, the full extent is used.
    To include a value, min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
2134
2135    origin is a 3-tuple with geo referenced
2136    UTM coordinates (zone, easting, northing)
2137
2138    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2139    which means that longitude is the fastest
2140    varying dimension (row major order, so to speak)
2141
2142    ferret2sww uses grid points as vertices in a triangular grid
2143    counting vertices from lower left corner upwards, then right
2144    """
2145
2146    import os
2147    from Scientific.IO.NetCDF import NetCDFFile
2148    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2149    from Numeric import allclose, around
2150
2151    precision = Float
2152
2153    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2154
2155    if minlat != None:
2156        assert -90 < minlat < 90 , msg
2157    if maxlat != None:
2158        assert -90 < maxlat < 90 , msg
2159    if minlon != None:
2160        assert -180 < minlon < 180 , msg
2161    if maxlon != None:
2162        assert -180 < maxlon < 180 , msg
2163 
2164
2165    #Get NetCDF data
2166    if verbose: print 'Reading files %s_*.nc' %basename_in
2167    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2168    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2169    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2170    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
2171
2172    if basename_out is None:
2173        swwname = basename_in + '.sww'
2174    else:
2175        swwname = basename_out + '.sww'
2176
    # Get dimensions of file_h
2178    for dimension in file_h.dimensions.keys():
2179        if dimension[:3] == 'LON':
2180            dim_h_longitude = dimension
2181        if dimension[:3] == 'LAT':
2182            dim_h_latitude = dimension
2183        if dimension[:4] == 'TIME':
2184            dim_h_time = dimension
2185
    #print 'long:', dim_h_longitude
    #print 'lats:', dim_h_latitude
    #print 'times:', dim_h_time
2189
2190    times = file_h.variables[dim_h_time]
2191    latitudes = file_h.variables[dim_h_latitude]
2192    longitudes = file_h.variables[dim_h_longitude]
2193   
    # get dimensions for file_e
2195    for dimension in file_e.dimensions.keys():
2196        if dimension[:3] == 'LON':
2197            dim_e_longitude = dimension
2198        if dimension[:3] == 'LAT':
2199            dim_e_latitude = dimension
2200        if dimension[:4] == 'TIME':
2201            dim_e_time = dimension
2202
    # get dimensions for file_u
2204    for dimension in file_u.dimensions.keys():
2205        if dimension[:3] == 'LON':
2206            dim_u_longitude = dimension
2207        if dimension[:3] == 'LAT':
2208            dim_u_latitude = dimension
2209        if dimension[:4] == 'TIME':
2210            dim_u_time = dimension
2211           
    # get dimensions for file_v
2213    for dimension in file_v.dimensions.keys():
2214        if dimension[:3] == 'LON':
2215            dim_v_longitude = dimension
2216        if dimension[:3] == 'LAT':
2217            dim_v_latitude = dimension
2218        if dimension[:4] == 'TIME':
2219            dim_v_time = dimension
2220
2221
    #Precision used by MOST for lat/lon is 4 or 5 decimals
2223    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2224    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2225
2226    #Check that files are compatible
2227    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2228    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2229    assert allclose(latitudes, e_lat)
2230
2231    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2232    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2233    assert allclose(longitudes, e_lon)
2234
2235    if mint == None:
2236        jmin = 0
2237    else:
2238        jmin = searchsorted(times, mint)
2239
2240    if maxt == None:
2241        jmax=len(times)
2242    else:
2243        jmax = searchsorted(times, maxt)
2244
2245    if minlat == None:
2246        kmin=0
2247    else:
2248        kmin = searchsorted(latitudes, minlat)
2249
2250    if maxlat == None:
2251        kmax = len(latitudes)
2252    else:
2253        kmax = searchsorted(latitudes, maxlat)
2254
2255    if minlon == None:
2256        lmin=0
2257    else:
2258        lmin = searchsorted(longitudes, minlon)
2259
2260    if maxlon == None:
2261        lmax = len(longitudes)
2262    else:
2263        lmax = searchsorted(longitudes, maxlon)
2264
    #print ' j', jmin, jmax
2266    times = times[jmin:jmax]
2267    latitudes = latitudes[kmin:kmax]
2268    longitudes = longitudes[lmin:lmax]
2269
2270
2271    if verbose: print 'cropping'
2272    zname = 'ELEVATION'
2273
2274    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2275    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2276    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2277    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2278
2279    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2280    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2281    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2282    #        from Numeric import asarray
2283    #        elevations=elevations.tolist()
2284    #        elevations.reverse()
2285    #        elevations=asarray(elevations)
2286    #    else:
2287    #        from Numeric import asarray
2288    #        elevations=elevations.tolist()
2289    #        elevations.reverse()
2290    #        elevations=asarray(elevations)
2291    #        'print hmmm'
2292
2293
2294
2295    #Get missing values
2296    nan_ha = file_h.variables['HA'].missing_value[0]
2297    nan_ua = file_u.variables['UA'].missing_value[0]
2298    nan_va = file_v.variables['VA'].missing_value[0]
2299    if hasattr(file_e.variables[zname],'missing_value'):
2300        nan_e  = file_e.variables[zname].missing_value[0]
2301    else:
2302        nan_e = None
2303
2304    #Cleanup
2305    from Numeric import sometrue
2306
2307    missing = (amplitudes == nan_ha)
2308    if sometrue (missing):
2309        if fail_on_NaN:
2310            msg = 'NetCDFFile %s contains missing values'\
2311                  %(basename_in+'_ha.nc')
2312            raise msg
2313        else:
2314            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2315
2316    missing = (uspeed == nan_ua)
2317    if sometrue (missing):
2318        if fail_on_NaN:
2319            msg = 'NetCDFFile %s contains missing values'\
2320                  %(basename_in+'_ua.nc')
2321            raise msg
2322        else:
2323            uspeed = uspeed*(missing==0) + missing*NaN_filler
2324
2325    missing = (vspeed == nan_va)
2326    if sometrue (missing):
2327        if fail_on_NaN:
2328            msg = 'NetCDFFile %s contains missing values'\
2329                  %(basename_in+'_va.nc')
2330            raise msg
2331        else:
2332            vspeed = vspeed*(missing==0) + missing*NaN_filler
2333
2334
2335    missing = (elevations == nan_e)
2336    if sometrue (missing):
2337        if fail_on_NaN:
2338            msg = 'NetCDFFile %s contains missing values'\
2339                  %(basename_in+'_e.nc')
2340            raise msg
2341        else:
2342            elevations = elevations*(missing==0) + missing*NaN_filler
2343
2344    #######
2345
2346
2347
2348    number_of_times = times.shape[0]
2349    number_of_latitudes = latitudes.shape[0]
2350    number_of_longitudes = longitudes.shape[0]
2351
2352    assert amplitudes.shape[0] == number_of_times
2353    assert amplitudes.shape[1] == number_of_latitudes
2354    assert amplitudes.shape[2] == number_of_longitudes
2355
2356    if verbose:
2357        print '------------------------------------------------'
2358        print 'Statistics:'
2359        print '  Extent (lat/lon):'
2360        print '    lat in [%f, %f], len(lat) == %d'\
2361              %(min(latitudes.flat), max(latitudes.flat),
2362                len(latitudes.flat))
2363        print '    lon in [%f, %f], len(lon) == %d'\
2364              %(min(longitudes.flat), max(longitudes.flat),
2365                len(longitudes.flat))
2366        print '    t in [%f, %f], len(t) == %d'\
2367              %(min(times.flat), max(times.flat), len(times.flat))
2368
        q = amplitudes.flat
        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = uspeed.flat
        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = vspeed.flat
        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = elevations.flat
        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2384
2385
2386    #print number_of_latitudes, number_of_longitudes
2387    number_of_points = number_of_latitudes*number_of_longitudes
2388    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2389
2390
2391    file_h.close()
2392    file_u.close()
2393    file_v.close()
2394    file_e.close()
2395
2396
2397    # NetCDF file definition
2398    outfile = NetCDFFile(swwname, 'w')
2399
2400    #Create new file
2401    outfile.institution = 'Geoscience Australia'
2402    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2403                          %(basename_in + '_ha.nc',
2404                            basename_in + '_ua.nc',
2405                            basename_in + '_va.nc',
2406                            basename_in + '_e.nc')
2407
2408
2409    #For sww compatibility
2410    outfile.smoothing = 'Yes'
2411    outfile.order = 1
2412
2413    #Start time in seconds since the epoch (midnight 1/1/1970)
2414    outfile.starttime = starttime = times[0]
2415    times = times - starttime  #Store relative times
2416
2417    # dimension definitions
2418    outfile.createDimension('number_of_volumes', number_of_volumes)
2419
2420    outfile.createDimension('number_of_vertices', 3)
2421    outfile.createDimension('number_of_points', number_of_points)
2422
2423
    outfile.createDimension('number_of_timesteps', len(times))
2426
2427    # variable definitions
2428    outfile.createVariable('x', precision, ('number_of_points',))
2429    outfile.createVariable('y', precision, ('number_of_points',))
2430    outfile.createVariable('elevation', precision, ('number_of_points',))
2431
2432    #FIXME: Backwards compatibility
2433    outfile.createVariable('z', precision, ('number_of_points',))
2434    #################################
2435
2436    outfile.createVariable('volumes', Int, ('number_of_volumes',
2437                                            'number_of_vertices'))
2438
2439    outfile.createVariable('time', precision,
2440                           ('number_of_timesteps',))
2441
2442    outfile.createVariable('stage', precision,
2443                           ('number_of_timesteps',
2444                            'number_of_points'))
2445
2446    outfile.createVariable('xmomentum', precision,
2447                           ('number_of_timesteps',
2448                            'number_of_points'))
2449
2450    outfile.createVariable('ymomentum', precision,
2451                           ('number_of_timesteps',
2452                            'number_of_points'))
2453
2454
2455    #Store
2456    from coordinate_transforms.redfearn import redfearn
2457    x = zeros(number_of_points, Float)  #Easting
2458    y = zeros(number_of_points, Float)  #Northing
2459
2460
2461    if verbose: print 'Making triangular grid'
2462    #Check zone boundaries
2463    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2464
2465    vertices = {}
2466    i = 0
2467    for k, lat in enumerate(latitudes):       #Y direction
2468        for l, lon in enumerate(longitudes):  #X direction
2469
2470            vertices[l,k] = i
2471
2472            zone, easting, northing = redfearn(lat,lon)
2473
            msg = 'Zone boundary crossed at longitude = %f' %lon
2475            #assert zone == refzone, msg
2476            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2477            x[i] = easting
2478            y[i] = northing
2479            i += 1
2480
2481
2482    #Construct 2 triangles per 'rectangular' element
2483    volumes = []
2484    for l in range(number_of_longitudes-1):    #X direction
2485        for k in range(number_of_latitudes-1): #Y direction
2486            v1 = vertices[l,k+1]
2487            v2 = vertices[l,k]
2488            v3 = vertices[l+1,k+1]
2489            v4 = vertices[l+1,k]
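            # Corner layout (l = longitude index, k = latitude index):
            #   v1 (l,k+1) ---- v3 (l+1,k+1)
            #    |               |
            #   v2 (l,k) ------- v4 (l+1,k)
            # Both triangles share the diagonal v2-v3 and are
            # counter-clockwise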
2490
2491            volumes.append([v1,v2,v3]) #Upper element
2492            volumes.append([v4,v3,v2]) #Lower element
2493
2494    volumes = array(volumes)
2495
2496    if origin == None:
2497        zone = refzone
2498        xllcorner = min(x)
2499        yllcorner = min(y)
2500    else:
2501        zone = origin[0]
2502        xllcorner = origin[1]
2503        yllcorner = origin[2]
2504
2505
2506    outfile.xllcorner = xllcorner
2507    outfile.yllcorner = yllcorner
2508    outfile.zone = zone
2509
2510
2511    if elevation is not None:
2512        z = elevation
2513    else:
2514        if inverted_bathymetry:
2515            z = -1*elevations
2516        else:
2517            z = elevations
2518        #FIXME: z should be obtained from MOST and passed in here
2519
2520    from Numeric import resize
2521    z = resize(z,outfile.variables['z'][:].shape)
2522    outfile.variables['x'][:] = x - xllcorner
2523    outfile.variables['y'][:] = y - yllcorner
2524    outfile.variables['z'][:] = z             #FIXME HACK
2525    outfile.variables['elevation'][:] = z
2526    outfile.variables['time'][:] = times   #Store time relative
2527    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2528
2529
2530
2531    #Time stepping
2532    stage = outfile.variables['stage']
2533    xmomentum = outfile.variables['xmomentum']
2534    ymomentum = outfile.variables['ymomentum']
2535
2536    if verbose: print 'Converting quantities'
2537    n = len(times)
2538    for j in range(n):
2539        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2540        i = 0
2541        for k in range(number_of_latitudes):      #Y direction
2542            for l in range(number_of_longitudes): #X direction
2543                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2544                stage[j,i] = w
2545                h = w - z[i]
2546                xmomentum[j,i] = uspeed[j,k,l]/100*h
2547                ymomentum[j,i] = vspeed[j,k,l]/100*h
2548                i += 1
2549
2550    #outfile.close()
2551
2552    #FIXME: Refactor using code from file_function.statistics
2553    #Something like print swwstats(swwname)
2554    if verbose:
2555        x = outfile.variables['x'][:]
2556        y = outfile.variables['y'][:]
2557        times = outfile.variables['time'][:]
2558        print '------------------------------------------------'
2559        print 'Statistics of output file:'
2560        print '  Name: %s' %swwname
2561        print '  Reference:'
2562        print '    Lower left corner: [%f, %f]'\
2563              %(xllcorner, yllcorner)
2564        print '    Start time: %f' %starttime
2565        print '  Extent:'
2566        print '    x [m] in [%f, %f], len(x) == %d'\
2567              %(min(x.flat), max(x.flat), len(x.flat))
2568        print '    y [m] in [%f, %f], len(y) == %d'\
2569              %(min(y.flat), max(y.flat), len(y.flat))
2570        print '    t [s] in [%f, %f], len(t) == %d'\
2571              %(min(times), max(times), len(times))
2572        print '  Quantities [SI units]:'
2573        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2574            q = outfile.variables[name][:].flat
2575            print '    %s in [%f, %f]' %(name, min(q), max(q))
2576
2577
2578
2579    outfile.close()
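
# Example (a sketch): converting a hypothetical set of MOST/Ferret files
# most_ha.nc, most_ua.nc, most_va.nc and most_e.nc, cropped to a window,
# with bathymetry stored depth-positive:
#
#     ferret2sww('most', basename_out='boundary',
#                minlat=-34.6, maxlat=-34.2,
#                minlon=150.6, maxlon=151.0,
#                inverted_bathymetry=True, verbose=True)
#     # -> writes boundary.sww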
2580
2581
2582
2583
2584
2585def timefile2netcdf(filename, quantity_names = None):
2586    """Template for converting typical text files with time series to
2587    NetCDF tms file.
2588
2589
    The file format is assumed to be two fields separated by a comma:
2591
2592        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
2593
2594    E.g
2595
2596      31/08/04 00:00:00, 1.328223 0 0
2597      31/08/04 00:15:00, 1.292912 0 0
2598
2599    will provide a time dependent function f(t) with three attributes
2600
    filename is assumed to be the rootname with extensions .txt and .tms
2602    """
2603
2604    import time, calendar
2605    from Numeric import array
2606    from config import time_format
2607    from utilities.numerical_tools import ensure_numeric   
2608
2609
2610
2611    fid = open(filename + '.txt')
2612    line = fid.readline()
2613    fid.close()
2614
2615    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
2617    assert len(fields) == 2, msg
2618
2619    try:
2620        starttime = calendar.timegm(time.strptime(fields[0], time_format))
2621    except ValueError:
2622        msg = 'First field in file %s must be' %filename
2623        msg += ' date-time with format %s.\n' %time_format
2624        msg += 'I got %s instead.' %fields[0]
2625        raise msg
2626
2627
2628    #Split values
2629    values = []
2630    for value in fields[1].split():
2631        values.append(float(value))
2632
2633    q = ensure_numeric(values)
2634
2635    msg = 'ERROR: File must contain at least one independent value'
2636    assert len(q.shape) == 1, msg
2637
2638
2639
2640    #Read times proper
2641    from Numeric import zeros, Float, alltrue
2642    from config import time_format
2643    import time, calendar
2644
2645    fid = open(filename + '.txt')
2646    lines = fid.readlines()
2647    fid.close()
2648
2649    N = len(lines)
2650    d = len(q)
2651
2652    T = zeros(N, Float)       #Time
2653    Q = zeros((N, d), Float)  #Values
2654
2655    for i, line in enumerate(lines):
2656        fields = line.split(',')
2657        realtime = calendar.timegm(time.strptime(fields[0], time_format))
2658
2659        T[i] = realtime - starttime
2660
2661        for j, value in enumerate(fields[1].split()):
2662            Q[i, j] = float(value)
2663
    msg = 'File %s must list time as a monotonically ' %filename
2665    msg += 'increasing sequence'
2666    assert alltrue( T[1:] - T[:-1] > 0 ), msg
2667
2668
2669    #Create NetCDF file
2670    from Scientific.IO.NetCDF import NetCDFFile
2671
2672    fid = NetCDFFile(filename + '.tms', 'w')
2673
2674
2675    fid.institution = 'Geoscience Australia'
2676    fid.description = 'Time series'
2677
2678
2679    #Reference point
2680    #Start time in seconds since the epoch (midnight 1/1/1970)
2681    #FIXME: Use Georef
2682    fid.starttime = starttime
2683
2684
2685    # dimension definitions
2686    #fid.createDimension('number_of_volumes', self.number_of_volumes)
2687    #fid.createDimension('number_of_vertices', 3)
2688
2689
2690    fid.createDimension('number_of_timesteps', len(T))
2691
2692    fid.createVariable('time', Float, ('number_of_timesteps',))
2693
2694    fid.variables['time'][:] = T
2695
2696    for i in range(Q.shape[1]):
2697        try:
2698            name = quantity_names[i]
2699        except:
2700            name = 'Attribute%d'%i
2701
2702        fid.createVariable(name, Float, ('number_of_timesteps',))
2703        fid.variables[name][:] = Q[:,i]
2704
2705    fid.close()
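
# Example (a sketch): given a hypothetical 'gauge.txt' containing lines like
#
#     31/08/04 00:00:00, 1.328223 0 0
#     31/08/04 00:15:00, 1.292912 0 0
#
# the call
#
#     timefile2netcdf('gauge', quantity_names=['stage', 'xmomentum', 'ymomentum'])
#
# writes 'gauge.tms' with a relative time vector and one variable per column.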
2706
2707
2708def extent_sww(file_name):
2709    """
2710    Read in an sww file.
2711
    Input:
    file_name - the sww file

    Output:
2716    z - Vector of bed elevation
2717    volumes - Array.  Each row has 3 values, representing
2718    the vertices that define the volume
2719    time - Vector of the times where there is stage information
2720    stage - array with respect to time and vertices (x,y)
2721    """
2722
2723
2724    from Scientific.IO.NetCDF import NetCDFFile
2725
2726    #Check contents
2727    #Get NetCDF
2728    fid = NetCDFFile(file_name, 'r')
2729
2730    # Get the variables
2731    x = fid.variables['x'][:]
2732    y = fid.variables['y'][:]
2733    stage = fid.variables['stage'][:]
2734    #print "stage",stage
2735    #print "stage.shap",stage.shape
2736    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
2737    #print "min(stage)",min(stage)
2738
2739    fid.close()
2740
2741    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
2742
2743
2744def sww2domain(filename,boundary=None,t=None,\
2745               fail_if_NaN=True,NaN_filler=0\
2746               ,verbose = False,very_verbose = False):
2747    """
2748    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
2749
2750    Boundary is not recommended if domain.smooth is not selected, as it
2751    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
2753    give a different final boundary, or crash.
2754    """
2755    NaN=9.969209968386869e+036
2756    #initialise NaN.
2757
2758    from Scientific.IO.NetCDF import NetCDFFile
2759    from shallow_water import Domain
2760    from Numeric import asarray, transpose, resize
2761
2762    if verbose: print 'Reading from ', filename
2763    fid = NetCDFFile(filename, 'r')    #Open existing file for read
2764    time = fid.variables['time']       #Timesteps
2765    if t is None:
2766        t = time[-1]
2767    time_interp = get_time_interp(time,t)
2768
2769    # Get the variables as Numeric arrays
2770    x = fid.variables['x'][:]             #x-coordinates of vertices
2771    y = fid.variables['y'][:]             #y-coordinates of vertices
2772    elevation = fid.variables['elevation']     #Elevation
2773    stage = fid.variables['stage']     #Water level
2774    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
2775    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
2776
2777    starttime = fid.starttime[0]
2778    volumes = fid.variables['volumes'][:] #Connectivity
2779    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
2780
2781    conserved_quantities = []
2782    interpolated_quantities = {}
2783    other_quantities = []
2784
2785    # get geo_reference
2786    #sww files don't have to have a geo_ref
2787    try:
2788        geo_reference = Geo_reference(NetCDFObject=fid)
2789    except: #AttributeError, e:
2790        geo_reference = None
2791
2792    if verbose: print '    getting quantities'
2793    for quantity in fid.variables.keys():
2794        dimensions = fid.variables[quantity].dimensions
2795        if 'number_of_timesteps' in dimensions:
2796            conserved_quantities.append(quantity)
2797            interpolated_quantities[quantity]=\
2798                  interpolated_quantity(fid.variables[quantity][:],time_interp)
2799        else: other_quantities.append(quantity)
2800
2801    other_quantities.remove('x')
2802    other_quantities.remove('y')
2803    other_quantities.remove('z')
2804    other_quantities.remove('volumes')
2805
2806    conserved_quantities.remove('time')
2807
2808    if verbose: print '    building domain'
2809    #    From domain.Domain:
2810    #    domain = Domain(coordinates, volumes,\
2811    #                    conserved_quantities = conserved_quantities,\
2812    #                    other_quantities = other_quantities,zone=zone,\
2813    #                    xllcorner=xllcorner, yllcorner=yllcorner)
2814
2815    #   From shallow_water.Domain:
2816    coordinates=coordinates.tolist()
2817    volumes=volumes.tolist()
2818    #FIXME:should this be in mesh?(peter row)
2819    if fid.smoothing == 'Yes': unique = False
2820    else: unique = True
2821    if unique:
2822        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
2823
2824
2825    try:   
2826        domain = Domain(coordinates, volumes, boundary)
2827    except AssertionError, e:
2828        fid.close()
2829        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
2830        raise msg
2831
2832    if not boundary is None:
2833        domain.boundary = boundary
2834
2835    domain.geo_reference = geo_reference
2836
2837    domain.starttime=float(starttime)+float(t)
2838    domain.time=0.0
2839
2840    for quantity in other_quantities:
2841        try:
2842            NaN = fid.variables[quantity].missing_value
2843        except:
2844            pass #quantity has no missing_value number
2845        X = fid.variables[quantity][:]
2846        if very_verbose:
2847            print '       ',quantity
2848            print '        NaN =',NaN
2849            print '        max(X)'
2850            print '       ',max(X)
2851            print '        max(X)==NaN'
2852            print '       ',max(X)==NaN
2853            print ''
2854        if (max(X)==NaN) or (min(X)==NaN):
2855            if fail_if_NaN:
2856                msg = 'quantity "%s" contains no_data entry'%quantity
2857                raise msg
2858            else:
2859                data = (X<>NaN)
2860                X = (X*data)+(data==0)*NaN_filler
2861        if unique:
2862            X = resize(X,(len(X)/3,3))
2863        domain.set_quantity(quantity,X)
2864    #
2865    for quantity in conserved_quantities:
2866        try:
2867            NaN = fid.variables[quantity].missing_value
2868        except:
2869            pass #quantity has no missing_value number
2870        X = interpolated_quantities[quantity]
2871        if very_verbose:
2872            print '       ',quantity
2873            print '        NaN =',NaN
2874            print '        max(X)'
2875            print '       ',max(X)
2876            print '        max(X)==NaN'
2877            print '       ',max(X)==NaN
2878            print ''
2879        if (max(X)==NaN) or (min(X)==NaN):
2880            if fail_if_NaN:
2881                msg = 'quantity "%s" contains no_data entry'%quantity
2882                raise msg
2883            else:
2884                data = (X<>NaN)
2885                X = (X*data)+(data==0)*NaN_filler
2886        if unique:
2887            X = resize(X,(X.shape[0]/3,3))
2888        domain.set_quantity(quantity,X)
2889       
2890    fid.close()
2891    return domain
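
# Example (a sketch): rebuilding a domain from the last stored timestep of a
# hypothetical 'run.sww', padding missing values instead of failing:
#
#     domain = sww2domain('run.sww', fail_if_NaN=False, NaN_filler=0,
#                         verbose=True)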
2892
2893def interpolated_quantity(saved_quantity,time_interp):
2894
2895    #given an index and ratio, interpolate quantity with respect to time.
2896    index,ratio = time_interp
2897    Q = saved_quantity
2898    if ratio > 0:
2899        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
2900    else:
2901        q = Q[index]
2902    #Return vector of interpolated values
2903    return q
2904
2905def get_time_interp(time,t=None):
2906    #Finds the ratio and index for time interpolation.
2907    #It is borrowed from previous pyvolution code.
2908    if t is None:
2909        t=time[-1]
2910        index = -1
2911        ratio = 0.
2912    else:
2913        T = time
2914        tau = t
2915        index=0
2916        msg = 'Time interval derived from file %s [%s:%s]'\
2917            %('FIXMEfilename', T[0], T[-1])
2918        msg += ' does not match model time: %s' %tau
2919        if tau < time[0]: raise msg
2920        if tau > time[-1]: raise msg
2921        while tau > time[index]: index += 1
2922        while tau < time[index]: index -= 1
2923        if tau == time[index]:
2924            #Protect against case where tau == time[-1] (last time)
2925            # - also works in general when tau == time[i]
2926            ratio = 0
2927        else:
2928            #t is now between index and index+1
2929            ratio = (tau - time[index])/(time[index+1] - time[index])
2930    return (index,ratio)
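
# Example (a sketch): with time == [0., 10., 20.] and t == 15.,
# get_time_interp returns (index=1, ratio=0.5), so interpolated_quantity
# gives 0.5*Q[1] + 0.5*Q[2], i.e. linear interpolation in time.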
2931
2932
2933def weed(coordinates,volumes,boundary = None):
    #Note: type() returns a type object, never the string 'array', so
    #compare the type's name instead
    if type(coordinates).__name__=='array':
        coordinates = coordinates.tolist()
    if type(volumes).__name__=='array':
        volumes = volumes.tolist()
2938
2939    unique = False
2940    point_dict = {}
2941    same_point = {}
2942    for i in range(len(coordinates)):
2943        point = tuple(coordinates[i])
2944        if point_dict.has_key(point):
2945            unique = True
2946            same_point[i]=point
2947            #to change all point i references to point j
2948        else:
2949            point_dict[point]=i
2950            same_point[i]=point
2951
2952    coordinates = []
2953    i = 0
2954    for point in point_dict.keys():
2955        point = tuple(point)
2956        coordinates.append(list(point))
2957        point_dict[point]=i
2958        i+=1
2959
2960
2961    for volume in volumes:
2962        for i in range(len(volume)):
2963            index = volume[i]
2964            if index>-1:
2965                volume[i]=point_dict[same_point[index]]
2966
2967    new_boundary = {}
2968    if not boundary is None:
2969        for segment in boundary.keys():
2970            point0 = point_dict[same_point[segment[0]]]
2971            point1 = point_dict[same_point[segment[1]]]
2972            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')? (peter row)
2975
2976            if new_boundary.has_key((point0,point1)):
2977                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
2978                                              #+','+label
2979
2980            elif new_boundary.has_key((point1,point0)):
2981                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
2982                                              #+','+label
2983            else: new_boundary[(point0,point1)]=label
2984
2985        boundary = new_boundary
2986
2987    return coordinates,volumes,boundary
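
# Example (a sketch): weed collapses duplicated vertices. Given
#
#     coords  = [[0.,0.], [1.,0.], [0.,0.], [0.,1.]]  # vertex 2 repeats vertex 0
#     volumes = [[0,1,3], [2,1,3]]
#     coords, volumes, _ = weed(coords, volumes)
#
# both triangles end up referencing the single surviving [0.,0.] vertex.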
2988
2989
2990def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
2991                 verbose=False):
2992    """Read Digitial Elevation model from the following NetCDF format (.dem)
2993
2994    Example:
2995
2996    ncols         3121
2997    nrows         1800
2998    xllcorner     722000
2999    yllcorner     5893000
3000    cellsize      25
3001    NODATA_value  -9999
3002    138.3698 137.4194 136.5062 135.5558 ..........
3003
3004    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3005    """
3006
3007    import os
3008    from Scientific.IO.NetCDF import NetCDFFile
3009    from Numeric import Float, zeros, sum, reshape, equal
3010
3011    root = basename_in
3012    inname = root + '.dem'
3013
3014    #Open existing netcdf file to read
3015    infile = NetCDFFile(inname, 'r')
3016    if verbose: print 'Reading DEM from %s' %inname
3017
3018    #Read metadata
3019    ncols = infile.ncols[0]
3020    nrows = infile.nrows[0]
3021    xllcorner = infile.xllcorner[0]
3022    yllcorner = infile.yllcorner[0]
3023    cellsize = infile.cellsize[0]
3024    NODATA_value = infile.NODATA_value[0]
3025    zone = infile.zone[0]
3026    false_easting = infile.false_easting[0]
3027    false_northing = infile.false_northing[0]
3028    projection = infile.projection
3029    datum = infile.datum
3030    units = infile.units
3031
3032    dem_elevation = infile.variables['elevation']
3033
3034    #Get output file name
3035    if basename_out == None:
3036        outname = root + '_' + repr(cellsize_new) + '.dem'
3037    else:
3038        outname = basename_out + '.dem'
3039
3040    if verbose: print 'Write decimated NetCDF file to %s' %outname
3041
3042    #Determine some dimensions for decimated grid
3043    (nrows_stencil, ncols_stencil) = stencil.shape
3044    x_offset = ncols_stencil / 2
3045    y_offset = nrows_stencil / 2
3046    cellsize_ratio = int(cellsize_new / cellsize)
3047    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3048    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3049
3050    #Open netcdf file for output
3051    outfile = NetCDFFile(outname, 'w')
3052
3053    #Create new file
3054    outfile.institution = 'Geoscience Australia'
3055    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3056                           'of spatial point data'
3057    #Georeferencing
3058    outfile.zone = zone
3059    outfile.projection = projection
3060    outfile.datum = datum
3061    outfile.units = units
3062
3063    outfile.cellsize = cellsize_new
3064    outfile.NODATA_value = NODATA_value
3065    outfile.false_easting = false_easting
3066    outfile.false_northing = false_northing
3067
3068    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3069    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3070    outfile.ncols = ncols_new
3071    outfile.nrows = nrows_new
3072
3073    # dimension definition
3074    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3075
3076    # variable definition
3077    outfile.createVariable('elevation', Float, ('number_of_points',))
3078
3079    # Get handle to the variable
3080    elevation = outfile.variables['elevation']
3081
3082    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3083
3084    #Store data
3085    global_index = 0
3086    for i in range(nrows_new):
3087        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3088        lower_index = global_index
3089        telev =  zeros(ncols_new, Float)
3090        local_index = 0
3091        trow = i * cellsize_ratio
3092
3093        for j in range(ncols_new):
3094            tcol = j * cellsize_ratio
3095            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3096
3097            #if dem contains 1 or more NODATA_values set value in
3098            #decimated dem to NODATA_value, else compute decimated
3099            #value using stencil
3100            if sum(sum(equal(tmp, NODATA_value))) > 0:
3101                telev[local_index] = NODATA_value
3102            else:
3103                telev[local_index] = sum(sum(tmp * stencil))
3104
3105            global_index += 1
3106            local_index += 1
3107
3108        upper_index = global_index
3109
3110        elevation[lower_index:upper_index] = telev
3111
3112    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3113
3114    infile.close()
3115    outfile.close()
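
# Example (a sketch): halving the resolution of a hypothetical 'topo.dem'
# (cellsize 25) with a 2x2 averaging stencil:
#
#     from Numeric import array
#     stencil = array([[0.25, 0.25],
#                      [0.25, 0.25]])
#     decimate_dem('topo', stencil, cellsize_new=50)
#     # -> writes topo_50.dem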
3116
3117
3118
3119
3120def tsh2sww(filename, verbose=False): #test_tsh2sww
3121    """
    Quick check that a tsh/msh file 'looks' good by converting it to sww.
3123    """
3124
3125    #FIXME Move to data_manager
3126    from shallow_water import Domain
3127    from pmesh2domain import pmesh_to_domain_instance
3128    import time, os
3129    from data_manager import get_dataobject
3130    from os import sep, path
3131    from utilities.numerical_tools import mean
3132
    if verbose: print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose: print "Number of triangles = ", len(domain)
3136
3137    domain.smooth = True
3138    domain.format = 'sww'   #Native netcdf visualisation format
3139    file_path, filename = path.split(filename)
3140    filename, ext = path.splitext(filename)
3141    domain.filename = filename
3142    domain.reduction = mean
3143    if verbose == True:print "file_path",file_path
3144    if file_path == "":file_path = "."
3145    domain.set_datadir(file_path)
3146
    if verbose:
3148        print "Output written to " + domain.get_datadir() + sep + \
3149              domain.filename + "." + domain.format
3150    sww = get_dataobject(domain)
3151    sww.store_connectivity()
3152    sww.store_timestep('stage')
3153
3154
3155def asc_csiro2sww(bath_dir,
3156                  elevation_dir,
3157                  ucur_dir,
3158                  vcur_dir,
3159                  sww_file,
3160                  minlat = None, maxlat = None,
3161                  minlon = None, maxlon = None,
3162                  zscale=1,
3163                  mean_stage = 0,
3164                  fail_on_NaN = True,
3165                  elevation_NaN_filler = 0,
3166                  bath_prefix='ba',
3167                  elevation_prefix='el',
3168                  verbose=False):
3169    """
3170    Produce an sww boundary file, from esri ascii data from CSIRO.
3171
3172    Also convert latitude and longitude to UTM. All coordinates are
3173    assumed to be given in the GDA94 datum.
3174
    Assume:
    All files are in esri ascii format

    4 types of information:
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions:
    The metadata of all the files is the same
    Each type is in a separate directory
    One bath file with extension .000
    The time period is less than 24 hrs and uniform.
3189    """
3190    from Scientific.IO.NetCDF import NetCDFFile
3191
3192    from coordinate_transforms.redfearn import redfearn
3193
3194    precision = Float # So if we want to change the precision its done here
3195
3196    # go in to the bath dir and load the only file,
3197    bath_files = os.listdir(bath_dir)
3198    #print "bath_files",bath_files
3199
3200    #fixme: make more general?
3201    bath_file = bath_files[0]
3202    bath_dir_file =  bath_dir + os.sep + bath_file
3203    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3204    #print "bath_metadata",bath_metadata
3205    #print "bath_grid",bath_grid
3206
3207    #Use the date.time of the bath file as a basis for
3208    #the start time for other files
3209    base_start = bath_file[-12:]
3210
3211    #go into the elevation dir and load the 000 file
3212    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3213                         + base_start
3214    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
3215    #print "elevation_dir_file",elevation_dir_file
3216    #print "elevation_grid", elevation_grid
3217
3218    elevation_files = os.listdir(elevation_dir)
3219    ucur_files = os.listdir(ucur_dir)
3220    vcur_files = os.listdir(vcur_dir)
3221
3222    # the first elevation file should be the
3223    # file with the same base name as the bath data
3224    #print "elevation_files[0]",elevation_files[0]
3225    #print "'el' + base_start",'el' + base_start
    assert elevation_files[0] == elevation_prefix + base_start
3227
3228    #assert bath_metadata == elevation_metadata
3229
3230
3231
3232    number_of_latitudes = bath_grid.shape[0]
3233    number_of_longitudes = bath_grid.shape[1]
3234    #number_of_times = len(os.listdir(elevation_dir))
3235    #number_of_points = number_of_latitudes*number_of_longitudes
3236    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3237
3238    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3239                  for x in range(number_of_longitudes)]
3240    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3241                 for y in range(number_of_latitudes)]
3242
    # reverse order of lat, so the first lat represents the first grid row
3244    latitudes.reverse()
3245
3246    #print "latitudes - before _get_min_max_indexes",latitudes
3247    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
3248                                                 minlat=minlat, maxlat=maxlat,
3249                                                 minlon=minlon, maxlon=maxlon)
3250
3251
3252    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3253    #print "bath_grid",bath_grid
3254    latitudes = latitudes[kmin:kmax]
3255    longitudes = longitudes[lmin:lmax]
3256    number_of_latitudes = len(latitudes)
3257    number_of_longitudes = len(longitudes)
3258    number_of_times = len(os.listdir(elevation_dir))
3259    number_of_points = number_of_latitudes*number_of_longitudes
3260    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3261    #print "number_of_points",number_of_points
3262
3263    #Work out the times
3264    if len(elevation_files) > 1:
3265        # Assume: The time period is less than 24hrs.
3266        time_period = (int(elevation_files[1][-3:]) - \
3267                      int(elevation_files[0][-3:]))*60*60
3268        times = [x*time_period for x in range(len(elevation_files))]
3269    else:
3270        times = [0.0]
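    # e.g. suffixes el...000 and el...003 give time_period == 3*60*60 s,
    # so times == [0, 10800, 21600, ...]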
3271    #print "times", times
3272    #print "number_of_latitudes", number_of_latitudes
3273    #print "number_of_longitudes", number_of_longitudes
3274    #print "number_of_times", number_of_times
3275    #print "latitudes", latitudes
3276    #print "longitudes", longitudes
3277
3278
3279    if verbose:
3280        print '------------------------------------------------'
3281        print 'Statistics:'
3282        print '  Extent (lat/lon):'
3283        print '    lat in [%f, %f], len(lat) == %d'\
3284              %(min(latitudes), max(latitudes),
3285                len(latitudes))
3286        print '    lon in [%f, %f], len(lon) == %d'\
3287              %(min(longitudes), max(longitudes),
3288                len(longitudes))
3289        print '    t in [%f, %f], len(t) == %d'\
3290              %(min(times), max(times), len(times))
3291
3292    ######### WRITE THE SWW FILE #############
3293    # NetCDF file definition
3294    outfile = NetCDFFile(sww_file, 'w')
3295
3296    #Create new file
3297    outfile.institution = 'Geoscience Australia'
3298    outfile.description = 'Converted from XXX'
3299
3300
3301    #For sww compatibility
3302    outfile.smoothing = 'Yes'
3303    outfile.order = 1
3304
3305    #Start time in seconds since the epoch (midnight 1/1/1970)
3306    outfile.starttime = starttime = times[0]
3307
3308
3309    # dimension definitions
3310    outfile.createDimension('number_of_volumes', number_of_volumes)
3311
3312    outfile.createDimension('number_of_vertices', 3)
3313    outfile.createDimension('number_of_points', number_of_points)
3314    outfile.createDimension('number_of_timesteps', number_of_times)
3315
3316    # variable definitions
3317    outfile.createVariable('x', precision, ('number_of_points',))
3318    outfile.createVariable('y', precision, ('number_of_points',))
3319    outfile.createVariable('elevation', precision, ('number_of_points',))
3320
3321    #FIXME: Backwards compatibility
3322    outfile.createVariable('z', precision, ('number_of_points',))
3323    #################################
3324
3325    outfile.createVariable('volumes', Int, ('number_of_volumes',
3326                                            'number_of_vertices'))
3327
3328    outfile.createVariable('time', precision,
3329                           ('number_of_timesteps',))
3330
3331    outfile.createVariable('stage', precision,
3332                           ('number_of_timesteps',
3333                            'number_of_points'))
3334
3335    outfile.createVariable('xmomentum', precision,
3336                           ('number_of_timesteps',
3337                            'number_of_points'))
3338
3339    outfile.createVariable('ymomentum', precision,
3340                           ('number_of_timesteps',
3341                            'number_of_points'))
3342
3343    #Store
3344    from coordinate_transforms.redfearn import redfearn
3345    x = zeros(number_of_points, Float)  #Easting
3346    y = zeros(number_of_points, Float)  #Northing
3347
3348    if verbose: print 'Making triangular grid'
3349    #Get zone of 1st point.
3350    refzone, _, _ = redfearn(latitudes[0],longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat, lon)

            msg = 'Zone boundary crossed at longitude = %f' %lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1


    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            #Note, this is different to the ferret2sww code
            #since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) #Upper element
            volumes.append([v4,v2,v3]) #Lower element

    volumes = array(volumes)
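    # Sketch of one grid cell (latitudes run north to south, so row k+1
    # lies south of row k; easting increases with l):
    #
    #   v2 ---- v4       upper element: [v1, v3, v2]
    #   |  \     |       lower element: [v4, v2, v3]
    #   |    \   |
    #   v1 ---- v3
    #
    # Both triangles are then counterclockwise in (easting, northing).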

    geo_ref = Geo_reference(refzone, min(x), min(y))
    geo_ref.write_NetCDF(outfile)

    # This would put the geo ref in the middle
    #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2.0, (max(y)+min(y))/2.)


    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (UTM):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y),
                len(y))
        print 'geo_ref: ', geo_ref

    z = resize(bath_grid, outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
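    # The astype(Int32) cast matters on 64-bit platforms (hence the
    # Opteron note): Numeric's Int becomes a 64-bit integer there,
    # which may not match the 32-bit integer type of the NetCDF
    # 'volumes' variable.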

    # To create a minimal but valid sww file (debugging aid), use:
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z
    # and move the next line up in the code, after outfile.order = 1:
    #number_of_times = 1

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
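    # Conversion applied below: stage w = zscale*elevation + mean_stage,
    # depth h = w - z, and the ucur/vcur grids (current velocities in
    # m/s) are multiplied by h to give momenta in m^2/s. Illustrative
    # numbers only: with zscale=1 and mean_stage=0, an elevation value
    # of 0.5 m over bathymetry z = -10 m gives w = 0.5 and h = 10.5, so
    # a u-current of 0.2 m/s yields xmomentum = 2.1 m^2/s.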
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
           _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])

        #print "elevation_grid",elevation_grid
        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
        #print "elevation_grid",elevation_grid
        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue(missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                raise Exception(msg)
            else:
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler


        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()

def _get_min_max_indexes(latitudes, longitudes,
                         minlat=None, maxlat=None,
                         minlon=None, maxlon=None):
    """
    Return min and max indexes (for slicing) of the lat and lon arrays,
    covering the area specified by min/max lat/lon.

    Think of the latitudes and longitudes as describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assume latitudes and longitudes are sorted:
    lon - from low to high (west to east, eg 148 - 151)
    lat - from high to low (north to south, eg -35 - -38)
    """

    # reverse order of lat, so it's in ascending order
    latitudes.reverse()
    largest_lat_index = len(latitudes)-1
    #Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0


    if maxlat is None:
        lat_max_index = largest_lat_index #len(latitudes)
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)

    #Take into account that the latitude list was reversed
    latitudes.reverse() # restore original order; list was reversed in place
    lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
                                   largest_lat_index - lat_min_index
    lat_max_index = lat_max_index + 1 # taking into account how slicing works
    lon_max_index = lon_max_index + 1 # taking into account how slicing works

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
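
# Example (hypothetical values): with latitudes [-35, -36, -37, -38]
# and longitudes [148, 149, 150, 151],
#   kmin, kmax, lmin, lmax = _get_min_max_indexes(lats, lons,
#                                                 minlat=-37, maxlat=-36,
#                                                 minlon=149, maxlon=150)
# returns slice bounds such that lats[kmin:kmax] and lons[lmin:lmax]
# just cover the requested window.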


def _read_asc(filename, verbose=False):
    """Read ESRI file in the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    """

    datafile = open(filename)

    if verbose: print 'Reading DEM from %s' %(filename)
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), ' lines'

    ncols = int(lines.pop(0).split()[1].strip())
    nrows = int(lines.pop(0).split()[1].strip())
    xllcorner = float(lines.pop(0).split()[1].strip())
    yllcorner = float(lines.pop(0).split()[1].strip())
    cellsize = float(lines.pop(0).split()[1].strip())
    NODATA_value = float(lines.pop(0).split()[1].strip())

    assert len(lines) == nrows

    #Store data
    grid = []

    for line in lines:
        cells = line.split()
        assert len(cells) == ncols
        grid.append(array([float(x) for x in cells]))
    grid = array(grid)

    return {'xllcorner':xllcorner,
            'yllcorner':yllcorner,
            'cellsize':cellsize,
            'NODATA_value':NODATA_value}, grid
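
# Example usage (hypothetical filename):
#   meta, grid = _read_asc('bathymetry.asc')
# meta holds xllcorner, yllcorner, cellsize and NODATA_value; grid is a
# nrows x ncols array whose first row is the northernmost, following
# the ESRI ASCII grid convention.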

def sww2timeseries(swwfile,
                   gauge_filename,
                   gauge_data_outname,
                   quantity = None,
                   time_min = None,
                   time_max = None,
                   verbose = False):

    """Read SWW file and extract time series for prescribed quantities at
    gauge locations.

    The gauge locations are defined in gauge_filename. This file should be
    in the form: gaugename, easting, northing, and should be stored in a
    .csv or .xya file.

    Time series data at the gauges can be written to file (for example,
    Benfield requested raw data) with the default being no data written.

    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'depth'.

    The user can define a list of quantities. The possibilities are
    the conserved quantities of the shallow water wave equation and other
    quantities which can be derived from those, i.e.
    ['depth', 'xmomentum', 'ymomentum', 'momentum', 'velocity', 'bearing'].

    Momentum is the absolute momentum, sqrt(xmomentum^2 + ymomentum^2).
    Note, units of momentum are m^2/s and depth is m.

    Velocity is absolute momentum divided by depth. (Confirming correct units:
    vel = abs mom / depth = (m^2/s)/m = m/s.)

    Bearing returns the angle of the velocity vector from North.

    If time_min and time_max are given, output is plotted over that time
    range. The default is to plot the entire time range found in the sww
    file.

    Graphics are exported in 'png' format and stored in the same
    directory as the input file.
    """

    if quantity is None: quantity = 'depth'

    #FIXME: Not yet implemented. Intended steps:
    # 1. Extract gauge locations from gauge_filename.
    # 2. Extract all quantities from swwfile (f = file_function), with
    #    time_min defaulting to min(f.T) and time_max to max(f.T).
    # 3. Loop through the appropriate range of time, plot prescribed
    #    quantities and export data if requested (no export if
    #    gauge_data_outname is None).

    return
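
# Intended usage once implemented (sketch; argument values are
# hypothetical):
#   sww2timeseries('model_output.sww', 'gauges.csv', 'gauge_data.txt',
#                  quantity = 'depth', verbose = True)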