source: inundation/pyvolution/data_manager.py @ 2516

"""data_manager.py - input output for AnuGA


This module takes care of reading and writing datafiles such as topographies,
model output, etc


Formats used within AnuGA:

.sww: NetCDF format for storing model output f(t,x,y)
.tms: NetCDF format for storing time series f(t)

.xya: ASCII format for storing arbitrary points and associated attributes
.pts: NetCDF format for storing arbitrary points and associated attributes

.asc: ASCII format of regular DEMs as output from ArcView
.prj: Associated ArcView file giving more meta data for asc format
.ers: ERMapper header format of regular DEMs for ArcView

.dem: NetCDF representation of regular DEM data

.tsh: ASCII format for storing meshes and associated boundary and region info
.msh: NetCDF format for storing meshes and associated boundary and region info

.nc: Native ferret NetCDF format
.geo: Houdini's ASCII geometry format (?)


A typical dataflow can be described as follows:

Manually created files:
ASC, PRJ:     Digital elevation models (gridded)
TSH:          Triangular meshes (e.g. created from pmesh)
NC:           Model outputs for use as boundary conditions (e.g. from MOST)


AUTOMATICALLY CREATED FILES:

ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEMs to native pts file

NC -> SWW: Conversion of MOST boundary files to boundary sww

PTS + TSH -> TSH with elevation: Least squares fit

TSH -> SWW:  Conversion of TSH to sww viewable using Swollen

TSH + Boundary SWW -> SWW: Simulation using pyvolution

"""
import os

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around


from coordinate_transforms.geo_reference import Geo_reference


def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s


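# Example (illustrative, not part of the original module):
#   make_filename('Flood study (run 2)')   # ->  'Flood_study_run_2'
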
def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname

    # Test both read and write permission (bitwise or of the mode flags)
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return path



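# Example (illustrative): check_dir('~/anuga_output', verbose=1) expands the
# user directory, creates it if necessary and returns the path with a
# trailing separator.
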
def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN


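# Example (illustrative): on unix,
#   create_filename('data', 'runup', 'sww', time=1.5)
# returns 'data/runup_time1.50.sww' (check_dir may create 'data' as a
# side effect).
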
def get_files(datadir, filename, format, size):
    """Get all filenames with given name, size and format
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)  #Includes trailing separator

    #Match the '_size%d' tag written by create_filename
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)



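# Example (illustrative): with the naming convention above,
#   get_files('data', 'runup', 'sww', 4)
# returns all files matching 'data/runup_size4*.sww'.
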
#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        msg = "Mode %s must be either: " %mode +\
              "'w' (write), 'r' (read) or 'a' (append)"
        assert mode in ['r', 'w', 'a'], msg

        #Create filename
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))


        self.filename = create_filename(domain.get_datadir(),
                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain


        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        #minimum_allowed_depth = 0.001
        minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise IOError(msg)



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #change domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure =\
                Data_format_sww(self.domain, mode=self.mode,
                                max_size = self.max_size,
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #Where depth is below minimum_allowed_depth, store the
                    #bed elevation instead (i.e. stage follows the bed)
                    A = choose(A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()


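# Typical usage sketch (illustrative; 'domain' is assumed to be a pyvolution
# Domain instance):
#   writer = Data_format_sww(domain)
#   writer.store_connectivity()    # static mesh and elevation data
#   writer.store_timestep(['stage', 'xmomentum', 'ymomentum'])  # per step
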
#Class for handling checkpoint data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates of triangles and their connectivity (volumes)
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #Note: Z is not stored here; elevation is saved per timestep
        #along with the other quantities

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                          %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise IOError(msg)


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        target = fid.variables[name]  #Variable holding the named quantity
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision = self.precision)

        target[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()




#Class for storing xya output
#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface to the ASCII xya format for storing points and attributes
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store_all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momenta) to file
        """
        pass


#Auxiliary
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N == 3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))

    outfile.close()


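# Example (illustrative): write a single disconnected triangle.
#   from Numeric import array, Float
#   x = array([[0.0, 1.0, 0.0]], Float)   # x coordinates of the 3 vertices
#   y = array([[0.0, 0.0, 1.0]], Float)   # y coordinates
#   z = array([[0.0, 0.0, 0.0]], Float)   # elevations
#   write_obj('triangle', x, y, z)        # creates triangle.obj
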
#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN, xx, yy, zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN, xx, yy, zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float, tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry', x, y, z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        #glob returns full paths, so filename already includes data_dir
        lines = open(filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise ValueError('Could not extract time from filename %s' %filename)



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float, tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t, x, y, z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()


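# Example (illustrative): keep every 10th timestep of an sww file.
#   filter_netcdf('run.sww', 'run_coarse.sww', first=0, step=10)
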
#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

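# Example (illustrative): for a domain with domain.format == 'sww',
#   writer = get_dataobject(domain)  # equivalent to Data_format_sww(domain, 'w')
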
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get ASCII file
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)


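# Example (illustrative; assumes a file 'bathymetry.xya' as described above):
#   xya2pts('bathymetry', verbose=True)          # creates bathymetry.pts
#   xya2pts('bathymetry', z_func=lambda z: -z)   # negate the third column
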
######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


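# Example (illustrative; the function still works but is obsolete):
#   write_ptsfile('points.pts', [[0.0, 0.0], [1.0, 0.0]], [10.0, 20.0],
#                 attribute_name='elevation')
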
def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result


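# Example (illustrative; assumes 'topo.dem' exists in the format above):
#   dem2pts('topo', easting_min=722000, easting_max=724000,
#           use_cache=True, verbose=True)   # -> topo.pts
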
def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                          'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculate the number of NODATA_values in each row
    nn = 0
    no_counts = []
    for i in range(nrows):
        v = dem_elevation_r[i,:]
        this_count = sum(v == NODATA_value)
        nn += this_count
        no_counts.append(this_count)

    if verbose and nn > 0:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d NODATA_values in the elevation' %nn

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    nopoints = nrows_in_bounding_box*ncols_in_bounding_box - nn
    #nopoints = nrows_in_bounding_box*ncols_in_bounding_box
    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    #Store data
    global_index = 0
    for i in range(nrows):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        no_NODATA = no_counts[i]
        if no_NODATA > 0:
            newcols = ncols_in_bounding_box - no_NODATA
        else:
            newcols = ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)


        local_index = 0

        y = (nrows-i)*cellsize + yllcorner
        for j in range(ncols):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min, y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])




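# Example (illustrative): the generator yields one list of
# [easting, northing, elevation] triplets per cross section, e.g.
#   for section in _read_hecras_cross_sections(lines):
#       print '%d points in cross section' %len(section)
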
def hecras_cross_sections2pts(basename_in,
                              basename_out=None,
                              verbose=False):
    """Read HEC-RAS elevation data from the following ASCII format (.sdf)

    Example:


# RAS export file created on Mon 15Aug2005 11:42
# by HEC-RAS Version 3.1.1

BEGIN HEADER:
  UNITS: METRIC
  DTM TYPE: TIN
  DTM: v:\1\cit\perth_topo\river_tin
  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
  MAP PROJECTION: UTM
  PROJECTION ZONE: 50
  DATUM: AGD66
  VERTICAL DATUM:
  NUMBER OF REACHES:  19
  NUMBER OF CROSS-SECTIONS:  14206
END HEADER:


Only the SURFACE LINE data of the following form will be utilised

  CROSS-SECTION:
    STREAM ID:Southern-Wungong
    REACH ID:Southern-Wungong
    STATION:19040.*
    CUT LINE:
      405548.671603161 , 6438142.7594925
      405734.536092045 , 6438326.10404912
      405745.130459356 , 6438331.48627354
      405813.89633823 , 6438368.6272789
    SURFACE LINE:
     405548.67,   6438142.76,   35.37
     405552.24,   6438146.28,   35.41
     405554.78,   6438148.78,   35.44
     405555.80,   6438149.79,   35.44
     405559.37,   6438153.31,   35.45
     405560.88,   6438154.81,   35.44
     405562.93,   6438156.83,   35.42
     405566.50,   6438160.35,   35.38
     405566.99,   6438160.83,   35.37
     ...
   END CROSS-SECTION

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get ASCII file
    infile = open(root + '.sdf', 'r')  #Open SDF file for read

    if verbose: print 'Reading DEM from %s' %(root + '.sdf')

    lines = infile.readlines()
    infile.close()

    if verbose: print 'Converting to pts format'

    i = 0
    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
        i += 1

    assert lines[i].strip().upper() == 'BEGIN HEADER:'
    i += 1

    assert lines[i].strip().upper().startswith('UNITS:')
    units = lines[i].strip().split()[1]
    i += 1

    assert lines[i].strip().upper().startswith('DTM TYPE:')
    i += 1

    assert lines[i].strip().upper().startswith('DTM:')
    i += 1

    assert lines[i].strip().upper().startswith('STREAM')
    i += 1

    assert lines[i].strip().upper().startswith('CROSS')
    i += 1

    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
    projection = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
    zone = int(lines[i].strip().split(':')[1])
    i += 1

    assert lines[i].strip().upper().startswith('DATUM:')
    datum = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
    number_of_cross_sections = int(lines[i].strip().split(':')[1])
    i += 1


    #Now read all points
    points = []
    elevation = []
    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
        for k, entry in enumerate(entries):
            points.append(entry[:2])
            elevation.append(entry[2])


    msg = 'Actual #number_of_cross_sections == %d, Reported as %d'\
          %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg

    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    #FIXME (DSG-ON): use loadASCII export_points_file
    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable ' +\
                          'storage of spatial point data derived from HEC-RAS'

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = 0.0
    outfile.yllcorner = 0.0
    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
    outfile.false_northing = 1000000

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    outfile.variables['points'][:] = points
    outfile.variables['elevation'][:] = elevation

    outfile.close()



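# Example (illustrative; assumes 'river.sdf' in the format shown above):
#   hecras_cross_sections2pts('river', verbose=True)   # -> river.pts
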
def sww2dem(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            NODATA_value = -9999,
            easting_min = None,
            easting_max = None,
            northing_min = None,
            northing_max = None,
            expand_search = False, #To avoid intractable situations (This will be fixed when least_squares gets redesigned)
            verbose = False,
            origin = None,
            datum = 'WGS84',
            format = 'ers'):

    """Read SWW file and convert to Digital Elevation model format (.asc or .ers)

    Example (ASC):

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'elevation'.

    If timestep (an index) is given, output quantity at that timestep.

    If reduction is given, use that to reduce quantity over all timesteps.

    datum sets the geodetic datum written to the output header
    (default 'WGS84')

    format can be either 'asc' or 'ers'
    """
1544
1545    import sys
1546    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1547    from Numeric import array2string
1548
1549    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1550    from util import apply_expression_to_dictionary
1551
1552    msg = 'Format must be either asc or ers'
1553    assert format.lower() in ['asc', 'ers'], msg
1554
1555
1556    false_easting = 500000
1557    false_northing = 10000000
1558
1559    if quantity is None:
1560        quantity = 'elevation'
1561
1562    if reduction is None:
1563        reduction = max
1564
1565    if basename_out is None:
1566        basename_out = basename_in + '_%s' %quantity
1567
1568    swwfile = basename_in + '.sww'
1569    demfile = basename_out + '.' + format
1570    # Note the use of a .ers extension is optional (write_ermapper_grid will
1571    # deal with either option
1572
1573    #Read sww file
1574    if verbose: print 'Reading from %s' %swwfile
1575    from Scientific.IO.NetCDF import NetCDFFile
1576    fid = NetCDFFile(swwfile)
1577
1578    #Get extent and reference
1579    x = fid.variables['x'][:]
1580    y = fid.variables['y'][:]
1581    volumes = fid.variables['volumes'][:]
1582
1583    number_of_timesteps = fid.dimensions['number_of_timesteps']
1584    number_of_points = fid.dimensions['number_of_points']
1585    if origin is None:
1586
1587        #Get geo_reference
1588        #sww files don't have to have a geo_ref
1589        try:
1590            geo_reference = Geo_reference(NetCDFObject=fid)
1591        except AttributeError, e:
1592            geo_reference = Geo_reference() #Default georef object
1593
1594        xllcorner = geo_reference.get_xllcorner()
1595        yllcorner = geo_reference.get_yllcorner()
1596        zone = geo_reference.get_zone()
1597    else:
1598        zone = origin[0]
1599        xllcorner = origin[1]
1600        yllcorner = origin[2]
1601
1602
1603
1604    #FIXME: Refactor using code from file_function.statistics
1605    #Something like print swwstats(swwname)
1606    if verbose:
1607        x = fid.variables['x'][:]
1608        y = fid.variables['y'][:]
1609        times = fid.variables['time'][:]
1610        print '------------------------------------------------'
1611        print 'Statistics of SWW file:'
1612        print '  Name: %s' %swwfile
1613        print '  Reference:'
1614        print '    Lower left corner: [%f, %f]'\
1615              %(xllcorner, yllcorner)
1616        print '    Start time: %f' %fid.starttime[0]
1617        print '  Extent:'
1618        print '    x [m] in [%f, %f], len(x) == %d'\
1619              %(min(x.flat), max(x.flat), len(x.flat))
1620        print '    y [m] in [%f, %f], len(y) == %d'\
1621              %(min(y.flat), max(y.flat), len(y.flat))
1622        print '    t [s] in [%f, %f], len(t) == %d'\
1623              %(min(times), max(times), len(times))
1624        print '  Quantities [SI units]:'
1625        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
1626            q = fid.variables[name][:].flat
1627            print '    %s in [%f, %f]' %(name, min(q), max(q))
1628
1629
1630
1631
1632
1633    #Get quantity and reduce if applicable
1634    if verbose: print 'Processing quantity %s' %quantity
1635
1636    #Turn NetCDF objects into Numeric arrays
1637    quantity_dict = {}
1638    for name in fid.variables.keys():
1639        quantity_dict[name] = fid.variables[name][:]
1640
1641
1642    q = apply_expression_to_dictionary(quantity, quantity_dict)
1643
1644
1645
1646    if len(q.shape) == 2:
1647        #q has a time component and needs to be reduced along
1648        #the temporal dimension
1649        if verbose: print 'Reducing quantity %s' %quantity
1650        q_reduced = zeros( number_of_points, Float )
1651
1652        for k in range(number_of_points):
1653            q_reduced[k] = reduction( q[:,k] )
1654
1655        q = q_reduced
1656
1657    #Post condition: Now q has dimension: number_of_points
1658    assert len(q.shape) == 1
1659    assert q.shape[0] == number_of_points
1660
1661
1662    if verbose:
1663        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1664
1665
1666    #Create grid and update xll/yll corner and x,y
1667
1668    #Relative extent
1669    if easting_min is None:
1670        xmin = min(x)
1671    else:
1672        xmin = easting_min - xllcorner
1673
1674    if easting_max is None:
1675        xmax = max(x)
1676    else:
1677        xmax = easting_max - xllcorner
1678
1679    if northing_min is None:
1680        ymin = min(y)
1681    else:
1682        ymin = northing_min - yllcorner
1683
1684    if northing_max is None:
1685        ymax = max(y)
1686    else:
1687        ymax = northing_max - yllcorner
1688
1689
1690
1691    if verbose: print 'Creating grid'
1692    ncols = int((xmax-xmin)/cellsize)+1
1693    nrows = int((ymax-ymin)/cellsize)+1
1694
1695
1696    #New absolute reference and coordinates
1697    newxllcorner = xmin+xllcorner
1698    newyllcorner = ymin+yllcorner
1699
1700    x = x+xllcorner-newxllcorner
1701    y = y+yllcorner-newyllcorner
1702
1703    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1704    assert len(vertex_points.shape) == 2
1705
1706
1707
1708    grid_points = zeros ( (ncols*nrows, 2), Float )
1709
1710
1711    for i in xrange(nrows):
1712        if format.lower() == 'asc':
1713            yg = i*cellsize
1714        else:
1715            #this will flip the order of the y values for ers
1716            yg = (nrows-i)*cellsize
1717
1718        for j in xrange(ncols):
1719            xg = j*cellsize
1720            k = i*ncols + j
1721
1722            grid_points[k,0] = xg
1723            grid_points[k,1] = yg
1724
1725    #Interpolate
1726    from least_squares import Interpolation
1727
1728
1729    #FIXME: This should be done with precrop = True (?), otherwise it'll
1730    #take forever. With expand_search  set to False, some grid points might
1731    #miss out.... This will be addressed though Duncan's refactoring of least_squares
1732
1733    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
1734                           precrop = False,
1735                           expand_search = expand_search,
1736                           verbose = verbose)
1737
1738
1739
1740    #Interpolate using quantity values
1741    if verbose: print 'Interpolating'
1742    grid_values = interp.interpolate(q).flat
1743
1744
1745    if verbose:
1746        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
1747                                                      max(grid_values))
1748
1749    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
1750    P = interp.mesh.get_boundary_polygon()
1751    outside_indices = outside_polygon(grid_points, P, closed=True)
1752
1753    for i in outside_indices:
1754        grid_values[i] = NODATA_value
1755
1756
1757
1758
1759    if format.lower() == 'ers':
1760        # setup ERS header information
1761        grid_values = reshape(grid_values,(nrows, ncols))
1762        header = {}
1763        header['datum'] = '"' + datum + '"'
1764        # FIXME The use of hardwired UTM and zone number needs to be made optional
1765        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
1766        header['projection'] = '"UTM-' + str(zone) + '"'
1767        header['coordinatetype'] = 'EN'
1768        if header['coordinatetype'] == 'LL':
1769            header['longitude'] = str(newxllcorner)
1770            header['latitude'] = str(newyllcorner)
1771        elif header['coordinatetype'] == 'EN':
1772            header['eastings'] = str(newxllcorner)
1773            header['northings'] = str(newyllcorner)
1774        header['nullcellvalue'] = str(NODATA_value)
1775        header['xdimension'] = str(cellsize)
1776        header['ydimension'] = str(cellsize)
1777        header['value'] = '"' + quantity + '"'
1778        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
1779
1780
1781        #Write
1782        if verbose: print 'Writing %s' %demfile
1783        import ermapper_grids
1784        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
1785
1786        fid.close()
1787    else:
1788        #Write to Ascii format
1789
1790        #Write prj file
1791        prjfile = basename_out + '.prj'
1792
1793        if verbose: print 'Writing %s' %prjfile
1794        prjid = open(prjfile, 'w')
1795        prjid.write('Projection    %s\n' %'UTM')
1796        prjid.write('Zone          %d\n' %zone)
1797        prjid.write('Datum         %s\n' %datum)
1798        prjid.write('Zunits        NO\n')
1799        prjid.write('Units         METERS\n')
1800        prjid.write('Spheroid      %s\n' %datum)
1801        prjid.write('Xshift        %d\n' %false_easting)
1802        prjid.write('Yshift        %d\n' %false_northing)
1803        prjid.write('Parameters\n')
1804        prjid.close()
1805
1806
1807
1808        if verbose: print 'Writing %s' %demfile
1809
1810        ascid = open(demfile, 'w')
1811
1812        ascid.write('ncols         %d\n' %ncols)
1813        ascid.write('nrows         %d\n' %nrows)
1814        ascid.write('xllcorner     %d\n' %newxllcorner)
1815        ascid.write('yllcorner     %d\n' %newyllcorner)
1816        ascid.write('cellsize      %f\n' %cellsize)
1817        ascid.write('NODATA_value  %d\n' %NODATA_value)
1818
1819
1820        #Get bounding polygon from mesh
1821        #P = interp.mesh.get_boundary_polygon()
1822        #inside_indices = inside_polygon(grid_points, P)
1823
1824        for i in range(nrows):
1825            if verbose and i%((nrows+10)/10)==0:
1826                print 'Doing row %d of %d' %(i, nrows)
1827
1828            base_index = (nrows-i-1)*ncols
1829
1830            slice = grid_values[base_index:base_index+ncols]
1831            s = array2string(slice, max_line_width=sys.maxint)
1832            ascid.write(s[1:-1] + '\n')
1833
1834
1835            #print
1836            #for j in range(ncols):
1837            #    index = base_index+j#
1838            #    print grid_values[index],
1839            #    ascid.write('%f ' %grid_values[index])
1840            #ascid.write('\n')
1841
1842        #Close
1843        ascid.close()
1844        fid.close()
1845
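#Usage sketch for sww2dem (hedged: the basename 'run' is hypothetical;
#reduction can be any callable that collapses the time axis, e.g. max):
#
#    sww2dem('run', basename_out = 'run_stage', quantity = 'stage',
#            reduction = max, cellsize = 10, datum = 'WGS84',
#            format = 'asc', verbose = True)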
1846#Backwards compatibility
1847def sww2asc(basename_in, basename_out = None,
1848            quantity = None,
1849            timestep = None,
1850            reduction = None,
1851            cellsize = 10,
1852            verbose = False,
1853            origin = None):
1854    print 'sww2asc will soon be obsolete - please use sww2dem'
1855    sww2dem(basename_in,
1856            basename_out = basename_out,
1857            quantity = quantity,
1858            timestep = timestep,
1859            reduction = reduction,
1860            cellsize = cellsize,
1861            verbose = verbose,
1862            origin = origin,
1863            datum = 'WGS84',
1864            format = 'asc')
1865
1866def sww2ers(basename_in, basename_out = None,
1867            quantity = None,
1868            timestep = None,
1869            reduction = None,
1870            cellsize = 10,
1871            verbose = False,
1872            origin = None,
1873            datum = 'WGS84'):
1874    print 'sww2ers will soon be obsolete - please use sww2dem'
1875    sww2dem(basename_in,
1876            basename_out = basename_out,
1877            quantity = quantity,
1878            timestep = timestep,
1879            reduction = reduction,
1880            cellsize = cellsize,
1881            verbose = verbose,
1882            origin = origin,
1883            datum = datum,
1884            format = 'ers')
1885################################# END COMPATIBILITY ##############
1886
1887
1888
1889def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1890                                  use_cache = False,
1891                                  verbose = False):
1892    """Read Digitial Elevation model from the following ASCII format (.asc)   
1893
1894    Example:
1895
1896    ncols         3121
1897    nrows         1800
1898    xllcorner     722000
1899    yllcorner     5893000
1900    cellsize      25
1901    NODATA_value  -9999
1902    138.3698 137.4194 136.5062 135.5558 ..........
1903
1904    Convert basename_in + '.asc' to NetCDF format (.dem)
1905    mimicking the ASCII format closely.
1906
1907
1908    An accompanying file with the same basename_in but extension .prj must exist
1909    and is used to fix the UTM zone, datum, false northings and eastings.
1910
1911    The prj format is assumed to be as
1912
1913    Projection    UTM
1914    Zone          56
1915    Datum         WGS84
1916    Zunits        NO
1917    Units         METERS
1918    Spheroid      WGS84
1919    Xshift        0.0000000000
1920    Yshift        10000000.0000000000
1921    Parameters
1922    """
1923
1924
1925
1926    kwargs = {'basename_out': basename_out, 'verbose': verbose}
1927
1928    if use_cache is True:
1929        from caching import cache
1930        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
1931                       dependencies = [basename_in + '.asc'],
1932                       verbose = verbose)
1933
1934    else:
1935        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
1936       
1937    return result
1938
1939   
1940   
1941
1942
1943
1944def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1945                                  verbose = False):
1946    """Read Digitial Elevation model from the following ASCII format (.asc)
1947
1948    Internal function. See public function convert_dem_from_ascii2netcdf for details.
1949    """
1950
1951    import os
1952    from Scientific.IO.NetCDF import NetCDFFile
1953    from Numeric import Float, array
1954
1955    #root, ext = os.path.splitext(basename_in)
1956    root = basename_in
1957
1958    ###########################################
1959    # Read Meta data
1960    if verbose: print 'Reading METADATA from %s' %root + '.prj'
1961    metadatafile = open(root + '.prj')
1962    metalines = metadatafile.readlines()
1963    metadatafile.close()
1964
1965    L = metalines[0].strip().split()
1966    assert L[0].strip().lower() == 'projection'
1967    projection = L[1].strip()                   #TEXT
1968
1969    L = metalines[1].strip().split()
1970    assert L[0].strip().lower() == 'zone'
1971    zone = int(L[1].strip())
1972
1973    L = metalines[2].strip().split()
1974    assert L[0].strip().lower() == 'datum'
1975    datum = L[1].strip()                        #TEXT
1976
1977    L = metalines[3].strip().split()
1978    assert L[0].strip().lower() == 'zunits'     #IGNORE
1979    zunits = L[1].strip()                       #TEXT
1980
1981    L = metalines[4].strip().split()
1982    assert L[0].strip().lower() == 'units'
1983    units = L[1].strip()                        #TEXT
1984
1985    L = metalines[5].strip().split()
1986    assert L[0].strip().lower() == 'spheroid'   #IGNORE
1987    spheroid = L[1].strip()                     #TEXT
1988
1989    L = metalines[6].strip().split()
1990    assert L[0].strip().lower() == 'xshift'
1991    false_easting = float(L[1].strip())
1992
1993    L = metalines[7].strip().split()
1994    assert L[0].strip().lower() == 'yshift'
1995    false_northing = float(L[1].strip())
1996
1997    #print false_easting, false_northing, zone, datum
1998
1999
2000    ###########################################
2001    #Read DEM data
2002
2003    datafile = open(basename_in + '.asc')
2004
2005    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2006    lines = datafile.readlines()
2007    datafile.close()
2008
2009    if verbose: print 'Got', len(lines), ' lines'
2010
2011    ncols = int(lines[0].split()[1].strip())
2012    nrows = int(lines[1].split()[1].strip())
2013    xllcorner = float(lines[2].split()[1].strip())
2014    yllcorner = float(lines[3].split()[1].strip())
2015    cellsize = float(lines[4].split()[1].strip())
2016    NODATA_value = int(lines[5].split()[1].strip())
2017
2018    assert len(lines) == nrows + 6
2019
2020
2021    ##########################################
2022
2023
2024    if basename_out == None:
2025        netcdfname = root + '.dem'
2026    else:
2027        netcdfname = basename_out + '.dem'
2028
2029    if verbose: print 'Store to NetCDF file %s' %netcdfname
2030    # NetCDF file definition
2031    fid = NetCDFFile(netcdfname, 'w')
2032
2033    #Create new file
2034    fid.institution = 'Geoscience Australia'
2035    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2036                      'of spatial point data'
2037
2038    fid.ncols = ncols
2039    fid.nrows = nrows
2040    fid.xllcorner = xllcorner
2041    fid.yllcorner = yllcorner
2042    fid.cellsize = cellsize
2043    fid.NODATA_value = NODATA_value
2044
2045    fid.zone = zone
2046    fid.false_easting = false_easting
2047    fid.false_northing = false_northing
2048    fid.projection = projection
2049    fid.datum = datum
2050    fid.units = units
2051
2052
2053    # dimension definitions
2054    fid.createDimension('number_of_rows', nrows)
2055    fid.createDimension('number_of_columns', ncols)
2056
2057    # variable definitions
2058    fid.createVariable('elevation', Float, ('number_of_rows',
2059                                            'number_of_columns'))
2060
2061    # Get handles to the variables
2062    elevation = fid.variables['elevation']
2063
2064    #Store data
2065    n = len(lines[6:])
2066    for i, line in enumerate(lines[6:]):
2067        fields = line.split()
2068        if verbose and i%((n+10)/10)==0:
2069            print 'Processing row %d of %d' %(i, nrows)
2070
2071        elevation[i, :] = array([float(x) for x in fields])
2072
2073    fid.close()
2074
2075
2076
2077
2078
2079def ferret2sww(basename_in, basename_out = None,
2080               verbose = False,
2081               minlat = None, maxlat = None,
2082               minlon = None, maxlon = None,
2083               mint = None, maxt = None, mean_stage = 0,
2084               origin = None, zscale = 1,
2085               fail_on_NaN = True,
2086               NaN_filler = 0,
2087               elevation = None,
2088               inverted_bathymetry = False
2089               ): #FIXME: Bathymetry should be obtained
2090                                  #from MOST somehow.
2091                                  #Alternatively from elsewhere
2092                                  #or, as a last resort,
2093                                  #specified here.
2094                                  #The value of -100 will work
2095                                  #for the Wollongong tsunami
2096                                  #scenario but is very hacky
2097    """Convert 'Ferret' NetCDF format for wave propagation to
2098    sww format native to pyvolution.
2099
2100    Specify only basename_in and read files of the form
2101    basename_in_ha.nc, basename_in_ua.nc, basename_in_va.nc containing
2102    relative height, x-velocity and y-velocity, respectively.
2103
2104    Also convert latitude and longitude to UTM. All coordinates are
2105    assumed to be given in the GDA94 datum.
2106
2107    min's and max's: If omitted - full extent is used.
2108    To include a value min may equal it, while max must exceed it.
2109    Lat and lon are assumed to be in decimal degrees
2110
2111    origin is a 3-tuple with geo referenced
2112    UTM coordinates (zone, easting, northing)
2113
2114    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2115    which means that longitude is the fastest
2116    varying dimension (row major order, so to speak)
2117
2118    ferret2sww uses grid points as vertices in a triangular grid
2119    counting vertices from lower left corner upwards, then right
2120    """
2121
2122    import os
2123    from Scientific.IO.NetCDF import NetCDFFile
2124    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2125    from Numeric import allclose, around
2126
2127    precision = Float
2128
2129    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2130
2131    if minlat != None:
2132        assert -90 < minlat < 90 , msg
2133    if maxlat != None:
2134        assert -90 < maxlat < 90 , msg
2135    if minlon != None:
2136        assert -180 < minlon < 180 , msg
2137    if maxlon != None:
2138        assert -180 < maxlon < 180 , msg
2139 
2140
2141    #Get NetCDF data
2142    if verbose: print 'Reading files %s_*.nc' %basename_in
2143    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2144    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2145    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2146    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
2147
2148    if basename_out is None:
2149        swwname = basename_in + '.sww'
2150    else:
2151        swwname = basename_out + '.sww'
2152
2153    times = file_h.variables['TIME']
2154    latitudes = file_h.variables['LAT']
2155    longitudes = file_h.variables['LON']
2156
2157
2158
2159    #Precision used by most for lat/lon is 4 or 5 decimals
2160    e_lat = around(file_e.variables['LAT'][:], 5)
2161    e_lon = around(file_e.variables['LON'][:], 5)
2162
2163    #Check that files are compatible
2164    assert allclose(latitudes, file_u.variables['LAT'])
2165    assert allclose(latitudes, file_v.variables['LAT'])
2166    assert allclose(latitudes, e_lat)
2167
2168    assert allclose(longitudes, file_u.variables['LON'])
2169    assert allclose(longitudes, file_v.variables['LON'])
2170    assert allclose(longitudes, e_lon)
2171
2172
2173
2174    if mint == None:
2175        jmin = 0
2176    else:
2177        jmin = searchsorted(times, mint)
2178
2179    if maxt == None:
2180        jmax=len(times)
2181    else:
2182        jmax = searchsorted(times, maxt)
2183
2184    if minlat == None:
2185        kmin=0
2186    else:
2187        kmin = searchsorted(latitudes, minlat)
2188
2189    if maxlat == None:
2190        kmax = len(latitudes)
2191    else:
2192        kmax = searchsorted(latitudes, maxlat)
2193
2194    if minlon == None:
2195        lmin=0
2196    else:
2197        lmin = searchsorted(longitudes, minlon)
2198
2199    if maxlon == None:
2200        lmax = len(longitudes)
2201    else:
2202        lmax = searchsorted(longitudes, maxlon)
2203
2204
2205
2206    times = times[jmin:jmax]
2207    latitudes = latitudes[kmin:kmax]
2208    longitudes = longitudes[lmin:lmax]
2209
2210
2211    if verbose: print 'cropping'
2212    zname = 'ELEVATION'
2213
2214
2215    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2216    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2217    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2218    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2219
2220    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2221    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2222    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2223    #        from Numeric import asarray
2224    #        elevations=elevations.tolist()
2225    #        elevations.reverse()
2226    #        elevations=asarray(elevations)
2227    #    else:
2228    #        from Numeric import asarray
2229    #        elevations=elevations.tolist()
2230    #        elevations.reverse()
2231    #        elevations=asarray(elevations)
2232    #        'print hmmm'
2233
2234
2235
2236    #Get missing values
2237    nan_ha = file_h.variables['HA'].missing_value[0]
2238    nan_ua = file_u.variables['UA'].missing_value[0]
2239    nan_va = file_v.variables['VA'].missing_value[0]
2240    if hasattr(file_e.variables[zname],'missing_value'):
2241        nan_e  = file_e.variables[zname].missing_value[0]
2242    else:
2243        nan_e = None
2244
2245    #Cleanup
2246    from Numeric import sometrue
2247
2248    missing = (amplitudes == nan_ha)
2249    if sometrue (missing):
2250        if fail_on_NaN:
2251            msg = 'NetCDFFile %s contains missing values'\
2252                  %(basename_in+'_ha.nc')
2253            raise msg
2254        else:
2255            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2256
2257    missing = (uspeed == nan_ua)
2258    if sometrue (missing):
2259        if fail_on_NaN:
2260            msg = 'NetCDFFile %s contains missing values'\
2261                  %(basename_in+'_ua.nc')
2262            raise msg
2263        else:
2264            uspeed = uspeed*(missing==0) + missing*NaN_filler
2265
2266    missing = (vspeed == nan_va)
2267    if sometrue (missing):
2268        if fail_on_NaN:
2269            msg = 'NetCDFFile %s contains missing values'\
2270                  %(basename_in+'_va.nc')
2271            raise msg
2272        else:
2273            vspeed = vspeed*(missing==0) + missing*NaN_filler
2274
2275
2276    missing = (elevations == nan_e)
2277    if sometrue (missing):
2278        if fail_on_NaN:
2279            msg = 'NetCDFFile %s contains missing values'\
2280                  %(basename_in+'_e.nc')
2281            raise msg
2282        else:
2283            elevations = elevations*(missing==0) + missing*NaN_filler
2284
2285    #######
2286
2287
2288
2289    number_of_times = times.shape[0]
2290    number_of_latitudes = latitudes.shape[0]
2291    number_of_longitudes = longitudes.shape[0]
2292
2293    msg = 'No data received'
2294    assert latitudes.shape[0] > 0, msg
2295    assert longitudes.shape[0] > 0, msg
2296
2297    assert amplitudes.shape[0] == number_of_times
2298    assert amplitudes.shape[1] == number_of_latitudes
2299    assert amplitudes.shape[2] == number_of_longitudes
2300
2301    if verbose:
2302        print '------------------------------------------------'
2303        print 'Statistics:'
2304        print '  Extent (lat/lon):'
2305        print '    lat in [%f, %f], len(lat) == %d'\
2306              %(min(latitudes.flat), max(latitudes.flat),
2307                len(latitudes.flat))
2308        print '    lon in [%f, %f], len(lon) == %d'\
2309              %(min(longitudes.flat), max(longitudes.flat),
2310                len(longitudes.flat))
2311        print '    t in [%f, %f], len(t) == %d'\
2312              %(min(times.flat), max(times.flat), len(times.flat))
2313
2314        q = amplitudes.flat
2315        name = 'Amplitudes (ha) [cm]'
2316        print '    %s in [%f, %f]' %(name, min(q), max(q))
2317
2318        q = uspeed.flat
2319        name = 'Speeds (ua) [cm/s]'
2320        print '    %s in [%f, %f]' %(name, min(q), max(q))
2321
2322        q = vspeed.flat
2323        name = 'Speeds (va) [cm/s]'
2324        print '    %s in [%f, %f]' %(name, min(q), max(q))
2325
2326        q = elevations.flat
2327        name = 'Elevations (e) [m]'
2328        print '    %s in [%f, %f]' %(name, min(q), max(q))
2329
2330
2331    #print number_of_latitudes, number_of_longitudes
2332    number_of_points = number_of_latitudes*number_of_longitudes
2333    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2334
2335
2336    file_h.close()
2337    file_u.close()
2338    file_v.close()
2339    file_e.close()
2340
2341
2342    # NetCDF file definition
2343    outfile = NetCDFFile(swwname, 'w')
2344
2345    #Create new file
2346    outfile.institution = 'Geoscience Australia'
2347    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2348                          %(basename_in + '_ha.nc',
2349                            basename_in + '_ua.nc',
2350                            basename_in + '_va.nc',
2351                            basename_in + '_e.nc')
2352
2353
2354    #For sww compatibility
2355    outfile.smoothing = 'Yes'
2356    outfile.order = 1
2357
2358    #Start time in seconds since the epoch (midnight 1/1/1970)
2359    outfile.starttime = starttime = times[0]
2360    times = times - starttime  #Store relative times
2361
2362    # dimension definitions
2363    outfile.createDimension('number_of_volumes', number_of_volumes)
2364
2365    outfile.createDimension('number_of_vertices', 3)
2366    outfile.createDimension('number_of_points', number_of_points)
2367
2368
2370    outfile.createDimension('number_of_timesteps', len(times))
2371
2372    # variable definitions
2373    outfile.createVariable('x', precision, ('number_of_points',))
2374    outfile.createVariable('y', precision, ('number_of_points',))
2375    outfile.createVariable('elevation', precision, ('number_of_points',))
2376
2377    #FIXME: Backwards compatibility
2378    outfile.createVariable('z', precision, ('number_of_points',))
2379    #################################
2380
2381    outfile.createVariable('volumes', Int, ('number_of_volumes',
2382                                            'number_of_vertices'))
2383
2384    outfile.createVariable('time', precision,
2385                           ('number_of_timesteps',))
2386
2387    outfile.createVariable('stage', precision,
2388                           ('number_of_timesteps',
2389                            'number_of_points'))
2390
2391    outfile.createVariable('xmomentum', precision,
2392                           ('number_of_timesteps',
2393                            'number_of_points'))
2394
2395    outfile.createVariable('ymomentum', precision,
2396                           ('number_of_timesteps',
2397                            'number_of_points'))
2398
2399
2400    #Store
2401    from coordinate_transforms.redfearn import redfearn
2402    x = zeros(number_of_points, Float)  #Easting
2403    y = zeros(number_of_points, Float)  #Northing
2404
2405
2406    if verbose: print 'Making triangular grid'
2407    #Check zone boundaries
2408    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2409
2410    vertices = {}
2411    i = 0
2412    for k, lat in enumerate(latitudes):       #Y direction
2413        for l, lon in enumerate(longitudes):  #X direction
2414
2415            vertices[l,k] = i
2416
2417            zone, easting, northing = redfearn(lat,lon)
2418
2419            msg = 'Zone boundary crossed at longitude = %s' %lon
2420            #assert zone == refzone, msg
2421            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2422            x[i] = easting
2423            y[i] = northing
2424            i += 1
2425
2426
2427    #Construct 2 triangles per 'rectangular' element
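    #Vertex layout of one grid cell (k indexes latitude, l longitude):
    #
    #    v1 ---- v3        upper element: [v1, v2, v3]
    #    |     / |         lower element: [v4, v3, v2]
    #    |   /   |
    #    v2 ---- v4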
2428    volumes = []
2429    for l in range(number_of_longitudes-1):    #X direction
2430        for k in range(number_of_latitudes-1): #Y direction
2431            v1 = vertices[l,k+1]
2432            v2 = vertices[l,k]
2433            v3 = vertices[l+1,k+1]
2434            v4 = vertices[l+1,k]
2435
2436            volumes.append([v1,v2,v3]) #Upper element
2437            volumes.append([v4,v3,v2]) #Lower element
2438
2439    volumes = array(volumes)
2440
2441    if origin == None:
2442        zone = refzone
2443        xllcorner = min(x)
2444        yllcorner = min(y)
2445    else:
2446        zone = origin[0]
2447        xllcorner = origin[1]
2448        yllcorner = origin[2]
2449
2450
2451    outfile.xllcorner = xllcorner
2452    outfile.yllcorner = yllcorner
2453    outfile.zone = zone
2454
2455
2456    if elevation is not None:
2457        z = elevation
2458    else:
2459        if inverted_bathymetry:
2460            z = -1*elevations
2461        else:
2462            z = elevations
2463        #FIXME: z should be obtained from MOST and passed in here
2464
2465    from Numeric import resize
2466    z = resize(z,outfile.variables['z'][:].shape)
2467    outfile.variables['x'][:] = x - xllcorner
2468    outfile.variables['y'][:] = y - yllcorner
2469    outfile.variables['z'][:] = z             #FIXME HACK
2470    outfile.variables['elevation'][:] = z
2471    outfile.variables['time'][:] = times   #Store time relative
2472    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2473
2474
2475
2476    #Time stepping
2477    stage = outfile.variables['stage']
2478    xmomentum = outfile.variables['xmomentum']
2479    ymomentum = outfile.variables['ymomentum']
2480
2481    if verbose: print 'Converting quantities'
2482    n = len(times)
2483    for j in range(n):
2484        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2485        i = 0
2486        for k in range(number_of_latitudes):      #Y direction
2487            for l in range(number_of_longitudes): #X direction
2488                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2489                stage[j,i] = w
2490                h = w - z[i]
2491                xmomentum[j,i] = uspeed[j,k,l]/100*h
2492                ymomentum[j,i] = vspeed[j,k,l]/100*h
2493                i += 1
2494
2495    #outfile.close()
2496
2497    #FIXME: Refactor using code from file_function.statistics
2498    #Something like print swwstats(swwname)
2499    if verbose:
2500        x = outfile.variables['x'][:]
2501        y = outfile.variables['y'][:]
2502        times = outfile.variables['time'][:]
2503        print '------------------------------------------------'
2504        print 'Statistics of output file:'
2505        print '  Name: %s' %swwname
2506        print '  Reference:'
2507        print '    Lower left corner: [%f, %f]'\
2508              %(xllcorner, yllcorner)
2509        print '    Start time: %f' %starttime
2510        print '  Extent:'
2511        print '    x [m] in [%f, %f], len(x) == %d'\
2512              %(min(x.flat), max(x.flat), len(x.flat))
2513        print '    y [m] in [%f, %f], len(y) == %d'\
2514              %(min(y.flat), max(y.flat), len(y.flat))
2515        print '    t [s] in [%f, %f], len(t) == %d'\
2516              %(min(times), max(times), len(times))
2517        print '  Quantities [SI units]:'
2518        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2519            q = outfile.variables[name][:].flat
2520            print '    %s in [%f, %f]' %(name, min(q), max(q))
2521
2522
2523
2524    outfile.close()
2525
2526
2527
2528
2529
2530def timefile2netcdf(filename, quantity_names = None):
2531    """Template for converting typical text files with time series to
2532    NetCDF tms file.
2533
2534
2535    The file format is assumed to be two fields separated by a comma:
2536
2537        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
2538
2539    E.g
2540
2541      31/08/04 00:00:00, 1.328223 0 0
2542      31/08/04 00:15:00, 1.292912 0 0
2543
2544    will provide a time dependent function f(t) with three attributes
2545
2546    filename is assumed to be the rootname to which extensions .txt (input) and .tms (output) are added
2547    """
2548
2549    import time, calendar
2550    from Numeric import array
2551    from config import time_format
2552    from utilities.numerical_tools import ensure_numeric   
2553
2554
2555
2556    fid = open(filename + '.txt')
2557    line = fid.readline()
2558    fid.close()
2559
2560    fields = line.split(',')
2561    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
2562    assert len(fields) == 2, msg
2563
2564    try:
2565        starttime = calendar.timegm(time.strptime(fields[0], time_format))
2566    except ValueError:
2567        msg = 'First field in file %s must be' %filename
2568        msg += ' date-time with format %s.\n' %time_format
2569        msg += 'I got %s instead.' %fields[0]
2570        raise msg
2571
2572
2573    #Split values
2574    values = []
2575    for value in fields[1].split():
2576        values.append(float(value))
2577
2578    q = ensure_numeric(values)
2579
2580    msg = 'ERROR: File must contain at least one independent value'
2581    assert len(q.shape) == 1, msg
2582
2583
2584
2585    #Read times proper
2586    from Numeric import zeros, Float, alltrue
2587    from config import time_format
2588    import time, calendar
2589
2590    fid = open(filename + '.txt')
2591    lines = fid.readlines()
2592    fid.close()
2593
2594    N = len(lines)
2595    d = len(q)
2596
2597    T = zeros(N, Float)       #Time
2598    Q = zeros((N, d), Float)  #Values
2599
2600    for i, line in enumerate(lines):
2601        fields = line.split(',')
2602        realtime = calendar.timegm(time.strptime(fields[0], time_format))
2603
2604        T[i] = realtime - starttime
2605
2606        for j, value in enumerate(fields[1].split()):
2607            Q[i, j] = float(value)
2608
2609    msg = 'File %s must list time as a monotonically ' %filename
2610    msg += 'increasing sequence'
2611    assert alltrue( T[1:] - T[:-1] > 0 ), msg
2612
2613
2614    #Create NetCDF file
2615    from Scientific.IO.NetCDF import NetCDFFile
2616
2617    fid = NetCDFFile(filename + '.tms', 'w')
2618
2619
2620    fid.institution = 'Geoscience Australia'
2621    fid.description = 'Time series'
2622
2623
2624    #Reference point
2625    #Start time in seconds since the epoch (midnight 1/1/1970)
2626    #FIXME: Use Georef
2627    fid.starttime = starttime
2628
2629
2630    # dimension definitions
2631    #fid.createDimension('number_of_volumes', self.number_of_volumes)
2632    #fid.createDimension('number_of_vertices', 3)
2633
2634
2635    fid.createDimension('number_of_timesteps', len(T))
2636
2637    fid.createVariable('time', Float, ('number_of_timesteps',))
2638
2639    fid.variables['time'][:] = T
2640
2641    for i in range(Q.shape[1]):
2642        try:
2643            name = quantity_names[i]
2644        except:
2645            name = 'Attribute%d'%i
2646
2647        fid.createVariable(name, Float, ('number_of_timesteps',))
2648        fid.variables[name][:] = Q[:,i]
2649
2650    fid.close()
2651
2652
2653def extent_sww(file_name):
2654    """
2655    Read in an sww file.
2656
2657    Input:
2658    file_name - the sww file
2659
2660    Output:
2661    A list [min(x), max(x), min(y), max(y), min(stage), max(stage)]
2662    describing the extent of the mesh and of the stage values.
2666    """
2667
2668
2669    from Scientific.IO.NetCDF import NetCDFFile
2670
2671    #Check contents
2672    #Get NetCDF
2673    fid = NetCDFFile(file_name, 'r')
2674
2675    # Get the variables
2676    x = fid.variables['x'][:]
2677    y = fid.variables['y'][:]
2678    stage = fid.variables['stage'][:]
2679    #print "stage",stage
2680    #print "stage.shap",stage.shape
2681    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
2682    #print "min(stage)",min(stage)
2683
2684    fid.close()
2685
2686    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
2687
2688
2689def sww2domain(filename,boundary=None,t=None,\
2690               fail_if_NaN=True,NaN_filler=0\
2691               ,verbose = False,very_verbose = False):
2692    """
2693    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
2694
2695    Boundary is not recommended if domain.smooth is not selected, as it
2696    uses unique coordinates, but not unique boundaries. This means that
2697    the boundary file will not be compatible with the coordinates, and will
2698    give a different final boundary, or crash.
2699    """
2700    NaN=9.969209968386869e+036
2701    #initialise NaN.
2702
2703    from Scientific.IO.NetCDF import NetCDFFile
2704    from shallow_water import Domain
2705    from Numeric import asarray, transpose, resize
2706
2707    if verbose: print 'Reading from ', filename
2708    fid = NetCDFFile(filename, 'r')    #Open existing file for read
2709    time = fid.variables['time']       #Timesteps
2710    if t is None:
2711        t = time[-1]
2712    time_interp = get_time_interp(time,t)
2713
2714    # Get the variables as Numeric arrays
2715    x = fid.variables['x'][:]             #x-coordinates of vertices
2716    y = fid.variables['y'][:]             #y-coordinates of vertices
2717    elevation = fid.variables['elevation']     #Elevation
2718    stage = fid.variables['stage']     #Water level
2719    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
2720    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
2721
2722    starttime = fid.starttime[0]
2723    volumes = fid.variables['volumes'][:] #Connectivity
2724    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
2725
2726    conserved_quantities = []
2727    interpolated_quantities = {}
2728    other_quantities = []
2729
2730    # get geo_reference
2731    #sww files don't have to have a geo_ref
2732    try:
2733        geo_reference = Geo_reference(NetCDFObject=fid)
2734    except: #AttributeError, e:
2735        geo_reference = None
2736
2737    if verbose: print '    getting quantities'
2738    for quantity in fid.variables.keys():
2739        dimensions = fid.variables[quantity].dimensions
2740        if 'number_of_timesteps' in dimensions:
2741            conserved_quantities.append(quantity)
2742            interpolated_quantities[quantity]=\
2743                  interpolated_quantity(fid.variables[quantity][:],time_interp)
2744        else: other_quantities.append(quantity)
2745
2746    other_quantities.remove('x')
2747    other_quantities.remove('y')
2748    other_quantities.remove('z')
2749    other_quantities.remove('volumes')
2750
2751    conserved_quantities.remove('time')
2752
2753    if verbose: print '    building domain'
2754    #    From domain.Domain:
2755    #    domain = Domain(coordinates, volumes,\
2756    #                    conserved_quantities = conserved_quantities,\
2757    #                    other_quantities = other_quantities,zone=zone,\
2758    #                    xllcorner=xllcorner, yllcorner=yllcorner)
2759
2760    #   From shallow_water.Domain:
2761    coordinates=coordinates.tolist()
2762    volumes=volumes.tolist()
2763    #FIXME:should this be in mesh?(peter row)
2764    if fid.smoothing == 'Yes': unique = False
2765    else: unique = True
2766    if unique:
2767        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
2768
2769
2770    try:   
2771        domain = Domain(coordinates, volumes, boundary)
2772    except AssertionError, e:
2773        fid.close()
2774        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
2775        raise msg
2776
2777    if not boundary is None:
2778        domain.boundary = boundary
2779
2780    domain.geo_reference = geo_reference
2781
2782    domain.starttime=float(starttime)+float(t)
2783    domain.time=0.0
2784
2785    for quantity in other_quantities:
2786        try:
2787            NaN = fid.variables[quantity].missing_value
2788        except:
2789            pass #quantity has no missing_value number
2790        X = fid.variables[quantity][:]
2791        if very_verbose:
2792            print '       ',quantity
2793            print '        NaN =',NaN
2794            print '        max(X)'
2795            print '       ',max(X)
2796            print '        max(X)==NaN'
2797            print '       ',max(X)==NaN
2798            print ''
2799        if (max(X)==NaN) or (min(X)==NaN):
2800            if fail_if_NaN:
2801                msg = 'quantity "%s" contains no_data entry'%quantity
2802                raise msg
2803            else:
2804                data = (X<>NaN)
2805                X = (X*data)+(data==0)*NaN_filler
2806        if unique:
2807            X = resize(X,(len(X)/3,3))
2808        domain.set_quantity(quantity,X)
2809    #
2810    for quantity in conserved_quantities:
2811        try:
2812            NaN = fid.variables[quantity].missing_value
2813        except:
2814            pass #quantity has no missing_value number
2815        X = interpolated_quantities[quantity]
2816        if very_verbose:
2817            print '       ',quantity
2818            print '        NaN =',NaN
2819            print '        max(X)'
2820            print '       ',max(X)
2821            print '        max(X)==NaN'
2822            print '       ',max(X)==NaN
2823            print ''
2824        if (max(X)==NaN) or (min(X)==NaN):
2825            if fail_if_NaN:
2826                msg = 'quantity "%s" contains no_data entry'%quantity
2827                raise msg
2828            else:
2829                data = (X<>NaN)
2830                X = (X*data)+(data==0)*NaN_filler
2831        if unique:
2832            X = resize(X,(X.shape[0]/3,3))
2833        domain.set_quantity(quantity,X)
2834       
2835    fid.close()
2836    return domain
2837
2838def interpolated_quantity(saved_quantity,time_interp):
2839
2840    #given an index and ratio, interpolate quantity with respect to time.
2841    index,ratio = time_interp
2842    Q = saved_quantity
2843    if ratio > 0:
2844        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
2845    else:
2846        q = Q[index]
2847    #Return vector of interpolated values
2848    return q
2849
2850def get_time_interp(time,t=None):
2851    #Finds the ratio and index for time interpolation.
2852    #It is borrowed from previous pyvolution code.
2853    if t is None:
2854        t=time[-1]
2855        index = -1
2856        ratio = 0.
2857    else:
2858        T = time
2859        tau = t
2860        index=0
2861        msg = 'Time interval derived from file %s [%s:%s]'\
2862            %('FIXMEfilename', T[0], T[-1])
2863        msg += ' does not match model time: %s' %tau
2864        if tau < time[0]: raise msg
2865        if tau > time[-1]: raise msg
2866        while tau > time[index]: index += 1
2867        while tau < time[index]: index -= 1
2868        if tau == time[index]:
2869            #Protect against case where tau == time[-1] (last time)
2870            # - also works in general when tau == time[i]
2871            ratio = 0
2872        else:
2873            #t is now between index and index+1
2874            ratio = (tau - time[index])/(time[index+1] - time[index])
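    #Worked example: time = [0, 10, 20] and t = 15 give index == 1 and
    #ratio == 0.5, so interpolated_quantity returns 0.5*Q[1] + 0.5*Q[2]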
2875    return (index,ratio)
2876
2877
2878def weed(coordinates,volumes,boundary = None):
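    #Hedged sketch of intent: collapse duplicated coordinates, e.g.
    #coordinates [[0,0],[1,0],[0,0]] with volumes [[0,1,2]] are weeded so
    #that both references to point (0,0) map to one coordinate index.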
2879    if type(coordinates).__name__ == 'array': #Numeric array, not list
2880        coordinates = coordinates.tolist()
2881    if type(volumes).__name__ == 'array':
2882        volumes = volumes.tolist()
2883
2884    unique = False
2885    point_dict = {}
2886    same_point = {}
2887    for i in range(len(coordinates)):
2888        point = tuple(coordinates[i])
2889        if point_dict.has_key(point):
2890            unique = True
2891            same_point[i]=point
2892            #to change all point i references to point j
2893        else:
2894            point_dict[point]=i
2895            same_point[i]=point
2896
2897    coordinates = []
2898    i = 0
2899    for point in point_dict.keys():
2900        point = tuple(point)
2901        coordinates.append(list(point))
2902        point_dict[point]=i
2903        i+=1
2904
2905
2906    for volume in volumes:
2907        for i in range(len(volume)):
2908            index = volume[i]
2909            if index>-1:
2910                volume[i]=point_dict[same_point[index]]
2911
2912    new_boundary = {}
2913    if not boundary is None:
2914        for segment in boundary.keys():
2915            point0 = point_dict[same_point[segment[0]]]
2916            point1 = point_dict[same_point[segment[1]]]
2917            label = boundary[segment]
2918            #FIXME should the boundary attributes be concatenated
2919            #('exterior, pond') or replaced ('pond')(peter row)
2920
2921            if new_boundary.has_key((point0,point1)):
2922                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
2923                                              #+','+label
2924
2925            elif new_boundary.has_key((point1,point0)):
2926                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
2927                                              #+','+label
2928            else: new_boundary[(point0,point1)]=label
2929
2930        boundary = new_boundary
2931
2932    return coordinates,volumes,boundary
2933
2934
2935def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
2936                 verbose=False):
2937    """Read Digitial Elevation model from the following NetCDF format (.dem)
2938
2939    Example:
2940
2941    ncols         3121
2942    nrows         1800
2943    xllcorner     722000
2944    yllcorner     5893000
2945    cellsize      25
2946    NODATA_value  -9999
2947    138.3698 137.4194 136.5062 135.5558 ..........
2948
2949    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
2950    """
2951
2952    import os
2953    from Scientific.IO.NetCDF import NetCDFFile
2954    from Numeric import Float, zeros, sum, reshape, equal
2955
2956    root = basename_in
2957    inname = root + '.dem'
2958
2959    #Open existing netcdf file to read
2960    infile = NetCDFFile(inname, 'r')
2961    if verbose: print 'Reading DEM from %s' %inname
2962
2963    #Read metadata
2964    ncols = infile.ncols[0]
2965    nrows = infile.nrows[0]
2966    xllcorner = infile.xllcorner[0]
2967    yllcorner = infile.yllcorner[0]
2968    cellsize = infile.cellsize[0]
2969    NODATA_value = infile.NODATA_value[0]
2970    zone = infile.zone[0]
2971    false_easting = infile.false_easting[0]
2972    false_northing = infile.false_northing[0]
2973    projection = infile.projection
2974    datum = infile.datum
2975    units = infile.units
2976
2977    dem_elevation = infile.variables['elevation']
2978
2979    #Get output file name
2980    if basename_out == None:
2981        outname = root + '_' + repr(cellsize_new) + '.dem'
2982    else:
2983        outname = basename_out + '.dem'
2984
2985    if verbose: print 'Write decimated NetCDF file to %s' %outname
2986
2987    #Determine some dimensions for decimated grid
2988    (nrows_stencil, ncols_stencil) = stencil.shape
2989    x_offset = ncols_stencil / 2
2990    y_offset = nrows_stencil / 2
2991    cellsize_ratio = int(cellsize_new / cellsize)
2992    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
2993    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
2994
2995    #Open netcdf file for output
2996    outfile = NetCDFFile(outname, 'w')
2997
2998    #Create new file
2999    outfile.institution = 'Geoscience Australia'
3000    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3001                           'of spatial point data'
3002    #Georeferencing
3003    outfile.zone = zone
3004    outfile.projection = projection
3005    outfile.datum = datum
3006    outfile.units = units
3007
3008    outfile.cellsize = cellsize_new
3009    outfile.NODATA_value = NODATA_value
3010    outfile.false_easting = false_easting
3011    outfile.false_northing = false_northing
3012
3013    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3014    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3015    outfile.ncols = ncols_new
3016    outfile.nrows = nrows_new
3017
3018    # dimension definition
3019    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3020
3021    # variable definition
3022    outfile.createVariable('elevation', Float, ('number_of_points',))
3023
3024    # Get handle to the variable
3025    elevation = outfile.variables['elevation']
3026
3027    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3028
3029    #Store data
3030    global_index = 0
3031    for i in range(nrows_new):
3032        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3033        lower_index = global_index
3034        telev =  zeros(ncols_new, Float)
3035        local_index = 0
3036        trow = i * cellsize_ratio
3037
3038        for j in range(ncols_new):
3039            tcol = j * cellsize_ratio
3040            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3041
3042            #if dem contains 1 or more NODATA_values set value in
3043            #decimated dem to NODATA_value, else compute decimated
3044            #value using stencil
3045            if sum(sum(equal(tmp, NODATA_value))) > 0:
3046                telev[local_index] = NODATA_value
3047            else:
3048                telev[local_index] = sum(sum(tmp * stencil))
3049
3050            global_index += 1
3051            local_index += 1
3052
3053        upper_index = global_index
3054
3055        elevation[lower_index:upper_index] = telev
3056
3057    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3058
3059    infile.close()
3060    outfile.close()
3061
3062
3063
3064
3065def tsh2sww(filename, verbose=False): #test_tsh2sww
3066    """
3067    Convert a tsh/msh file to sww format, e.g. to check that the mesh 'looks' good.
3068    """
3069
3070    #FIXME Move to data_manager
3071    from shallow_water import Domain
3072    from pmesh2domain import pmesh_to_domain_instance
3073    import time, os
3074    from data_manager import get_dataobject
3075    from os import sep, path
3076    from util import mean
3077
3078    if verbose == True:print 'Creating domain from', filename
3079    domain = pmesh_to_domain_instance(filename, Domain)
3080    if verbose == True:print "Number of triangles = ", len(domain)
3081
3082    domain.smooth = True
3083    domain.format = 'sww'   #Native netcdf visualisation format
3084    file_path, filename = path.split(filename)
3085    filename, ext = path.splitext(filename)
3086    domain.filename = filename
3087    domain.reduction = mean
3088    if verbose == True:print "file_path",file_path
3089    if file_path == "":file_path = "."
3090    domain.set_datadir(file_path)
3091
3092    if verbose == True:
3093        print "Output written to " + domain.get_datadir() + sep + \
3094              domain.filename + "." + domain.format
3095    sww = get_dataobject(domain)
3096    sww.store_connectivity()
3097    sww.store_timestep('stage')
3098
3099
3100def asc_csiro2sww(bath_dir,
3101                  elevation_dir,
3102                  ucur_dir,
3103                  vcur_dir,
3104                  sww_file,
3105                  minlat = None, maxlat = None,
3106                  minlon = None, maxlon = None,
3107                  zscale=1,
3108                  mean_stage = 0,
3109                  fail_on_NaN = True,
3110                  elevation_NaN_filler = 0,
3111                  bath_prefix='ba',
3112                  elevation_prefix='el',
3113                  verbose=False):
3114    """
3115    Produce an sww boundary file from ESRI ASCII data from CSIRO.
3116
3117    Also convert latitude and longitude to UTM. All coordinates are
3118    assumed to be given in the GDA94 datum.
3119
3120    assume:
3121    All files are in ESRI ASCII format
3122
3123    4 types of information
3124    bathymetry
3125    elevation
3126    u velocity
3127    v velocity
3128
3129    Assumptions
3130    The metadata of all the files is the same
3131    Each type is in a separate directory
3132    One bath file with extension .000
3133    The time period is less than 24hrs and uniform.
3134    """
3135    from Scientific.IO.NetCDF import NetCDFFile
3136
3137    from coordinate_transforms.redfearn import redfearn
3138
3139    precision = Float # So if we want to change the precision its done here
3140
3141    # go in to the bath dir and load the only file,
3142    bath_files = os.listdir(bath_dir)
3143    #print "bath_files",bath_files
3144
3145    #fixme: make more general?
3146    bath_file = bath_files[0]
3147    bath_dir_file =  bath_dir + os.sep + bath_file
3148    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3149    #print "bath_metadata",bath_metadata
3150    #print "bath_grid",bath_grid
3151
3152    #Use the date.time of the bath file as a basis for
3153    #the start time for other files
3154    base_start = bath_file[-12:]
3155
3156    #go into the elevation dir and load the 000 file
3157    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3158                         + base_start
3159    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
3160    #print "elevation_dir_file",elevation_dir_file
3161    #print "elevation_grid", elevation_grid
3162
3163    elevation_files = os.listdir(elevation_dir)
3164    ucur_files = os.listdir(ucur_dir)
3165    vcur_files = os.listdir(vcur_dir)
3166
3167    # the first elevation file should be the
3168    # file with the same base name as the bath data
3169    #print "elevation_files[0]",elevation_files[0]
3170    #print "'el' + base_start",'el' + base_start
3171    assert elevation_files[0] == 'el' + base_start
3172
3173    #assert bath_metadata == elevation_metadata
3174
3175
3176
3177    number_of_latitudes = bath_grid.shape[0]
3178    number_of_longitudes = bath_grid.shape[1]
3179    #number_of_times = len(os.listdir(elevation_dir))
3180    #number_of_points = number_of_latitudes*number_of_longitudes
3181    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3182
3183    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3184                  for x in range(number_of_longitudes)]
3185    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3186                 for y in range(number_of_latitudes)]
3187
3188    # reverse order of lat, so the first lat represents the first grid row
3189    latitudes.reverse()
3190
3191    #print "latitudes - before _get_min_max_indexes",latitudes
3192    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
3193                                                 minlat=minlat, maxlat=maxlat,
3194                                                 minlon=minlon, maxlon=maxlon)
3195
3196
3197    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3198    #print "bath_grid",bath_grid
3199    latitudes = latitudes[kmin:kmax]
3200    longitudes = longitudes[lmin:lmax]
3201    number_of_latitudes = len(latitudes)
3202    number_of_longitudes = len(longitudes)
3203    number_of_times = len(os.listdir(elevation_dir))
3204    number_of_points = number_of_latitudes*number_of_longitudes
3205    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3206    #print "number_of_points",number_of_points
3207
3208    #Work out the times
3209    if len(elevation_files) > 1:
3210        # Assume: The time period is less than 24hrs.
3211        time_period = (int(elevation_files[1][-3:]) - \
3212                      int(elevation_files[0][-3:]))*60*60
3213        times = [x*time_period for x in range(len(elevation_files))]
3214    else:
3215        times = [0.0]
3216    #print "times", times
3217    #print "number_of_latitudes", number_of_latitudes
3218    #print "number_of_longitudes", number_of_longitudes
3219    #print "number_of_times", number_of_times
3220    #print "latitudes", latitudes
3221    #print "longitudes", longitudes
3222
3223
3224    if verbose:
3225        print '------------------------------------------------'
3226        print 'Statistics:'
3227        print '  Extent (lat/lon):'
3228        print '    lat in [%f, %f], len(lat) == %d'\
3229              %(min(latitudes), max(latitudes),
3230                len(latitudes))
3231        print '    lon in [%f, %f], len(lon) == %d'\
3232              %(min(longitudes), max(longitudes),
3233                len(longitudes))
3234        print '    t in [%f, %f], len(t) == %d'\
3235              %(min(times), max(times), len(times))
3236
3237    ######### WRITE THE SWW FILE #############
3238    # NetCDF file definition
3239    outfile = NetCDFFile(sww_file, 'w')
3240
3241    #Create new file
3242    outfile.institution = 'Geoscience Australia'
3243    outfile.description = 'Converted from XXX'
3244
3245
3246    #For sww compatibility
3247    outfile.smoothing = 'Yes'
3248    outfile.order = 1
3249
3250    #Start time in seconds since the epoch (midnight 1/1/1970)
3251    outfile.starttime = starttime = times[0]
3252
3253
3254    # dimension definitions
3255    outfile.createDimension('number_of_volumes', number_of_volumes)
3256
3257    outfile.createDimension('number_of_vertices', 3)
3258    outfile.createDimension('number_of_points', number_of_points)
3259    outfile.createDimension('number_of_timesteps', number_of_times)
3260
3261    # variable definitions
3262    outfile.createVariable('x', precision, ('number_of_points',))
3263    outfile.createVariable('y', precision, ('number_of_points',))
3264    outfile.createVariable('elevation', precision, ('number_of_points',))
3265
3266    #FIXME: Backwards compatibility
3267    outfile.createVariable('z', precision, ('number_of_points',))
3268    #################################
3269
3270    outfile.createVariable('volumes', Int, ('number_of_volumes',
3271                                            'number_of_vertices'))
3272
3273    outfile.createVariable('time', precision,
3274                           ('number_of_timesteps',))
3275
3276    outfile.createVariable('stage', precision,
3277                           ('number_of_timesteps',
3278                            'number_of_points'))
3279
3280    outfile.createVariable('xmomentum', precision,
3281                           ('number_of_timesteps',
3282                            'number_of_points'))
3283
3284    outfile.createVariable('ymomentum', precision,
3285                           ('number_of_timesteps',
3286                            'number_of_points'))
3287
3288    #Store
3289    from coordinate_transforms.redfearn import redfearn
3290    x = zeros(number_of_points, Float)  #Easting
3291    y = zeros(number_of_points, Float)  #Northing
3292
3293    if verbose: print 'Making triangular grid'
3294    #Get zone of 1st point.
3295    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3296
3297    vertices = {}
3298    i = 0
3299    for k, lat in enumerate(latitudes):
3300        for l, lon in enumerate(longitudes):
3301
3302            vertices[l,k] = i
3303
3304            zone, easting, northing = redfearn(lat,lon)
3305
3306            msg = 'Zone boundary crossed at longitude = %s' %lon
3307            #assert zone == refzone, msg
3308            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3309            x[i] = easting
3310            y[i] = northing
3311            i += 1
3312
3313
3314    #Construct 2 triangles per 'rectangular' element
3315    volumes = []
3316    for l in range(number_of_longitudes-1):    #X direction
3317        for k in range(number_of_latitudes-1): #Y direction
3318            v1 = vertices[l,k+1]
3319            v2 = vertices[l,k]
3320            v3 = vertices[l+1,k+1]
3321            v4 = vertices[l+1,k]
3322
3323            #Note, this is different to the ferret2sww code
3324            #since the order of the lats is reversed.
3325            volumes.append([v1,v3,v2]) #Upper element
3326            volumes.append([v4,v2,v3]) #Lower element
3327
3328    volumes = array(volumes)
3329
3330    geo_ref = Geo_reference(refzone,min(x),min(y))
3331    geo_ref.write_NetCDF(outfile)
3332
3333    # This will put the geo ref in the middle
3334    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3335
3336
3337    if verbose:
3338        print '------------------------------------------------'
3339        print 'More Statistics:'
3340        print '  Extent (x/y) [m]:'
3341        print '    x in [%f, %f], len(x) == %d'\
3342              %(min(x), max(x),
3343                len(x))
3344        print '    y in [%f, %f], len(y) == %d'\
3345              %(min(y), max(y),
3346                len(y))
3347        print 'geo_ref: ',geo_ref
3348
    z = resize(bath_grid, outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

    # To create a minimal but still valid sww file, one could instead store
    # a single time step (setting number_of_times = 1 just after
    # outfile.order = 1 above):
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
           _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])

        #print "elevation_grid",elevation_grid
        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
        #print "elevation_grid",elevation_grid
        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue(missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values' %(elevation_files[j])
                raise msg
            else:
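                # Multiplying by (missing==0) zeroes out the NODATA cells;
                # the second term then substitutes the filler value there.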
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler


        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
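        # Stage w is the scaled elevation plus the mean stage; multiplying
        # the current grids (from the ucur/vcur files) by the depth
        # h = w - z converts velocities into the momentum quantities
        # (uh, vh) stored in the sww file.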
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()

def _get_min_max_indexes(latitudes, longitudes,
                         minlat=None, maxlat=None,
                         minlon=None, maxlon=None):
    """
    Return min and max indexes (for slicing) of the lat and long arrays,
    just covering the area specified by min/max lat/long.

    Think of the latitudes and longitudes as describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    extends outside the area spanned by the latitudes/longitudes.)

    Assumes latitudes and longitudes are sorted:
    lon - from low to high (west to east, e.g. 148 to 151)
    lat - from high to low (north to south, e.g. -35 to -38)
    """

    # reverse order of lat, so it's in ascending order
    latitudes.reverse()
    largest_lat_index = len(latitudes)-1
    #Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0

    if maxlat is None:
        lat_max_index = largest_lat_index
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)

    #Take into account that the latitude list was reversed
    latitudes.reverse() # undo the in-place reverse; the caller's list is mutated
    lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
                                   largest_lat_index - lat_min_index
    lat_max_index = lat_max_index + 1 # make the upper bound exclusive for slicing
    lon_max_index = lon_max_index + 1 # make the upper bound exclusive for slicing

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
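
# Illustrative example for _get_min_max_indexes (values assumed, not from
# the original source). With
#
#   lats = [-35.0, -36.0, -37.0, -38.0]   # north to south
#   lons = [148.0, 149.0, 150.0, 151.0]   # west to east
#
# the call
#
#   kmin, kmax, lmin, lmax = _get_min_max_indexes(lats, lons,
#                                                 minlat=-36.5, maxlat=-35.5,
#                                                 minlon=149.5, maxlon=150.5)
#
# yields (0, 3, 1, 4), so lats[kmin:kmax] == [-35.0, -36.0, -37.0] and
# lons[lmin:lmax] == [149.0, 150.0, 151.0] - the smallest slices that
# still cover the requested area.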


def _read_asc(filename, verbose=False):
    """Read ESRI file in the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    """

    datafile = open(filename)

    if verbose: print 'Reading DEM from %s' %(filename)
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), 'lines'

    ncols = int(lines.pop(0).split()[1].strip())
    nrows = int(lines.pop(0).split()[1].strip())
    xllcorner = float(lines.pop(0).split()[1].strip())
    yllcorner = float(lines.pop(0).split()[1].strip())
    cellsize = float(lines.pop(0).split()[1].strip())
    NODATA_value = float(lines.pop(0).split()[1].strip())

    assert len(lines) == nrows

    #Store data
    grid = []

    for i, line in enumerate(lines):
        cells = line.split()
        assert len(cells) == ncols
        grid.append(array([float(x) for x in cells]))
    grid = array(grid)

    return {'xllcorner':xllcorner,
            'yllcorner':yllcorner,
            'cellsize':cellsize,
            'NODATA_value':NODATA_value}, grid
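
# Illustrative example for _read_asc (filename assumed, not from the
# original source):
#
#   meta, grid = _read_asc('bathymetry.asc', verbose=True)
#   missing = (grid == meta['NODATA_value'])
#
# grid is an nrows x ncols Numeric array of floats; meta holds the
# georeferencing fields from the header.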