source: inundation-numpy-branch/pyvolution/data_manager.py @ 3381

Last change on this file since 3381 was 2541, checked in by sexton, 19 years ago

fixed dem2pts AGAIN!!!

1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using pyvolution
48
49"""
50import os
51
52from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
53     searchsorted, zeros, allclose, around
54
55
56from coordinate_transforms.geo_reference import Geo_reference
57
58
59def make_filename(s):
60    """Transform argument string into a suitable filename
61    """
62
63    s = s.strip()
64    s = s.replace(' ', '_')
65    s = s.replace('(', '')
66    s = s.replace(')', '')
67    s = s.replace('__', '_')
68
69    return s
70
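#Example of the transformation above (a doctest-style sketch):
#
#    >>> make_filename(' Model output (test) ')
#    'Model_output_test'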
71
def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:' + os.sep

            print 'Using directory %s instead' %path

    return path
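
#Typical use of check_dir (a minimal sketch; 'output' is a hypothetical
#subdirectory created on demand):
#
#    datadir = check_dir('output', verbose=True)
#    fd = open(datadir + 'results.txt', 'w')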
124
125
126
127def del_dir(path):
128    """Recursively delete directory path and all its contents
129    """
130
131    import os
132
133    if os.path.isdir(path):
134        for file in os.listdir(path):
135            X = os.path.join(path, file)
136
137
138            if os.path.isdir(X) and not os.path.islink(X):
139                del_dir(X)
140            else:
141                try:
142                    os.remove(X)
143                except:
144                    print "Could not remove file %s" %X
145
146        os.rmdir(path)
147
148
149
150def create_filename(datadir, filename, format, size=None, time=None):
151
152    import os
153    #from config import data_dir
154
155    FN = check_dir(datadir) + filename
156
157    if size is not None:
158        FN += '_size%d' %size
159
160    if time is not None:
161        FN += '_time%.2f' %time
162
163    FN += '.' + format
164    return FN
165
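#Example of the resulting naming scheme on unix (a doctest-style sketch;
#the arguments are illustrative only):
#
#    >>> create_filename('.', 'runup', 'sww', size=100, time=2.5)
#    './runup_size100_time2.50.sww'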
166
def get_files(datadir, filename, format, size):
    """Get all files (names) with given name, size and format
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)  #Includes trailing separator

    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)
180
181
182
183#Generic class for storing output to e.g. visualisation or checkpointing
184class Data_format:
185    """Generic interface to data formats
186    """
187
188
189    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'],\
               "Mode %s must be either 'w' (write), 'r' (read) or 'a' (append)" %mode
194
195        #Create filename
196        #self.filename = create_filename(domain.get_datadir(),
197        #domain.get_name(), extension, len(domain))
198
199
200        self.filename = create_filename(domain.get_datadir(),
201                        domain.get_name(), extension)
202
203        #print 'F', self.filename
204        self.timestep = 0
205        self.number_of_volumes = len(domain)
206        self.domain = domain
207
208
209        #FIXME: Should we have a general set_precision function?
210
211
212
213#Class for storing output to e.g. visualisation
214class Data_format_sww(Data_format):
215    """Interface to native NetCDF format (.sww) for storing model output
216
217    There are two kinds of data
218
219    1: Constant data: Vertex coordinates and field values. Stored once
220    2: Variable data: Conserved quantities. Stored once per timestep.
221
222    All data is assumed to reside at vertex locations.
223    """
224
225
226    def __init__(self, domain, mode = 'w',\
227                 max_size = 2000000000,
228                 recursion = False):
229        from Scientific.IO.NetCDF import NetCDFFile
230        from Numeric import Int, Float, Float32
231
        self.precision = Float32 #Use single precision
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size
        else:
            self.max_size = max_size #Default file size limit of ~2Gb
237        self.recursion = recursion
238        self.mode = mode
239
240        Data_format.__init__(self, domain, 'sww', mode)
241
242
243        # NetCDF file definition
244        fid = NetCDFFile(self.filename, mode)
245
246        if mode == 'w':
247
248            #Create new file
249            fid.institution = 'Geoscience Australia'
250            fid.description = 'Output from pyvolution suitable for plotting'
251
252            if domain.smooth:
253                fid.smoothing = 'Yes'
254            else:
255                fid.smoothing = 'No'
256
257            fid.order = domain.default_order
258
259            if hasattr(domain, 'texture'):
260                fid.texture = domain.texture
261            #else:
262            #    fid.texture = 'None'
263
264            #Reference point
265            #Start time in seconds since the epoch (midnight 1/1/1970)
266            #FIXME: Use Georef
267            fid.starttime = domain.starttime
268
269            # dimension definitions
270            fid.createDimension('number_of_volumes', self.number_of_volumes)
271            fid.createDimension('number_of_vertices', 3)
272
273            if domain.smooth is True:
274                fid.createDimension('number_of_points', len(domain.vertexlist))
275            else:
276                fid.createDimension('number_of_points', 3*self.number_of_volumes)
277
278            fid.createDimension('number_of_timesteps', None) #extensible
279
280            # variable definitions
281            fid.createVariable('x', self.precision, ('number_of_points',))
282            fid.createVariable('y', self.precision, ('number_of_points',))
283            fid.createVariable('elevation', self.precision, ('number_of_points',))
284            if domain.geo_reference is not None:
285                domain.geo_reference.write_NetCDF(fid)
286
287            #FIXME: Backwards compatibility
288            fid.createVariable('z', self.precision, ('number_of_points',))
289            #################################
290
291            fid.createVariable('volumes', Int, ('number_of_volumes',
292                                                'number_of_vertices'))
293
294            fid.createVariable('time', self.precision,
295                               ('number_of_timesteps',))
296
297            fid.createVariable('stage', self.precision,
298                               ('number_of_timesteps',
299                                'number_of_points'))
300
301            fid.createVariable('xmomentum', self.precision,
302                               ('number_of_timesteps',
303                                'number_of_points'))
304
305            fid.createVariable('ymomentum', self.precision,
306                               ('number_of_timesteps',
307                                'number_of_points'))
308
309        #Close
310        fid.close()
311
312
313    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format
315
316        Writes x,y,z coordinates of triangles constituting
317        the bed elevation.
318        """
319
320        from Scientific.IO.NetCDF import NetCDFFile
321
322        from Numeric import concatenate, Int
323
324        domain = self.domain
325
326        #Get NetCDF
327        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
328
329        # Get the variables
330        x = fid.variables['x']
331        y = fid.variables['y']
332        z = fid.variables['elevation']
333
334        volumes = fid.variables['volumes']
335
336        # Get X, Y and bed elevation Z
337        Q = domain.quantities['elevation']
338        X,Y,Z,V = Q.get_vertex_values(xy=True,
339                                      precision = self.precision)
340
341
342
343        x[:] = X.astype(self.precision)
344        y[:] = Y.astype(self.precision)
345        z[:] = Z.astype(self.precision)
346
347        #FIXME: Backwards compatibility
348        z = fid.variables['z']
349        z[:] = Z.astype(self.precision)
350        ################################
351
352        volumes[:] = V.astype(volumes.typecode())
353
354        #Close
355        fid.close()
356
357    def store_timestep(self, names):
358        """Store time and named quantities to file
359        """
360        from Scientific.IO.NetCDF import NetCDFFile
361        import types
362        from time import sleep
363        from os import stat
364
365        #minimum_allowed_depth = 0.001
366        minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
367        from Numeric import choose
368
369        #Get NetCDF
370        retries = 0
371        file_open = False
372        while not file_open and retries < 10:
373            try:
374                fid = NetCDFFile(self.filename, 'a') #Open existing file
375            except IOError:
376                #This could happen if someone was reading the file.
377                #In that case, wait a while and try again
378                msg = 'Warning (store_timestep): File %s could not be opened'\
379                      %self.filename
380                msg += ' - trying step %s again' %self.domain.time
381                print msg
382                retries += 1
383                sleep(1)
384            else:
385                file_open = True
386
387        if not file_open:
388            msg = 'File %s could not be opened for append' %self.filename
389            raise msg
390
391
392
393        #Check to see if the file is already too big:
394        time = fid.variables['time']
395        i = len(time)+1
396        file_size = stat(self.filename)[6]
397        file_size_increase =  file_size/i
398        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #I change domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
406            filename_ext = '_time_%s'%self.domain.time
407            filename_ext = filename_ext.replace('.', '_')
408            #remember the old filename, then give domain a
409            #name with the extension
410            old_domain_filename = self.domain.filename
411            if not self.recursion:
412                self.domain.filename = self.domain.filename+filename_ext
413
414            #change the domain starttime to the current time
415            old_domain_starttime = self.domain.starttime
416            self.domain.starttime = self.domain.time
417
418            #build a new data_structure.
419            next_data_structure=\
420                Data_format_sww(self.domain, mode=self.mode,\
421                                max_size = self.max_size,\
422                                recursion = self.recursion+1)
423            if not self.recursion:
424                print '    file_size = %s'%file_size
425                print '    saving file to %s'%next_data_structure.filename
426            #set up the new data_structure
427            self.domain.writer = next_data_structure
428
429            #FIXME - could be cleaner to use domain.store_timestep etc.
430            next_data_structure.store_connectivity()
431            next_data_structure.store_timestep(names)
432            fid.sync()
433            fid.close()
434
435            #restore the old starttime and filename
436            self.domain.starttime = old_domain_starttime
437            self.domain.filename = old_domain_filename
438        else:
439            self.recursion = False
440            domain = self.domain
441
442            # Get the variables
443            time = fid.variables['time']
444            stage = fid.variables['stage']
445            xmomentum = fid.variables['xmomentum']
446            ymomentum = fid.variables['ymomentum']
447            i = len(time)
448
449            #Store time
450            time[i] = self.domain.time
451
452
453            if type(names) not in [types.ListType, types.TupleType]:
454                names = [names]
455
456            for name in names:
457                # Get quantity
458                Q = domain.quantities[name]
459                A,V = Q.get_vertex_values(xy = False,
460                                          precision = self.precision)
461
462                #FIXME: Make this general (see below)
463                if name == 'stage':
464                    z = fid.variables['elevation']
465                    #print z[:]
466                    #print A-z[:]
467                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
468                    stage[i,:] = A.astype(self.precision)
469                elif name == 'xmomentum':
470                    xmomentum[i,:] = A.astype(self.precision)
471                elif name == 'ymomentum':
472                    ymomentum[i,:] = A.astype(self.precision)
473
474                #As in....
475                #eval( name + '[i,:] = A.astype(self.precision)' )
476                #FIXME: But we need a UNIT test for that before refactoring
477
478
479
480            #Flush and close
481            fid.sync()
482            fid.close()
483
484
485
486#Class for handling checkpoints data
487class Data_format_cpt(Data_format):
488    """Interface to native NetCDF format (.cpt)
489    """
490
491
    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)
499
500
501        # NetCDF file definition
502        fid = NetCDFFile(self.filename, mode)
503
504        if mode == 'w':
505            #Create new file
506            fid.institution = 'Geoscience Australia'
507            fid.description = 'Checkpoint data'
508            #fid.smooth = domain.smooth
509            fid.order = domain.default_order
510
511            # dimension definitions
512            fid.createDimension('number_of_volumes', self.number_of_volumes)
513            fid.createDimension('number_of_vertices', 3)
514
515            #Store info at all vertices (no smoothing)
516            fid.createDimension('number_of_points', 3*self.number_of_volumes)
517            fid.createDimension('number_of_timesteps', None) #extensible
518
519            # variable definitions
520
521            #Mesh
522            fid.createVariable('x', self.precision, ('number_of_points',))
523            fid.createVariable('y', self.precision, ('number_of_points',))
524
525
526            fid.createVariable('volumes', Int, ('number_of_volumes',
527                                                'number_of_vertices'))
528
529            fid.createVariable('time', self.precision,
530                               ('number_of_timesteps',))
531
532            #Allocate space for all quantities
533            for name in domain.quantities.keys():
534                fid.createVariable(name, self.precision,
535                                   ('number_of_timesteps',
536                                    'number_of_points'))
537
538        #Close
539        fid.close()
540
541
    def store_checkpoint(self):
        """Write x,y coordinates and connectivity of the triangles
        constituting the mesh.
        """
549
550        from Scientific.IO.NetCDF import NetCDFFile
551
552        from Numeric import concatenate
553
554        domain = self.domain
555
556        #Get NetCDF
557        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
558
559        # Get the variables
560        x = fid.variables['x']
561        y = fid.variables['y']
562
563        volumes = fid.variables['volumes']
564
565        # Get X, Y and bed elevation Z
566        Q = domain.quantities['elevation']
567        X,Y,Z,V = Q.get_vertex_values(xy=True,
568                                      precision = self.precision)
569
570
571
        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #Note: bed elevation Z is not stored here as the cpt file
        #defines no static elevation variable.

        volumes[:] = V
577
578        #Close
579        fid.close()
580
581
582    def store_timestep(self, name):
583        """Store time and named quantity to file
584        """
585        from Scientific.IO.NetCDF import NetCDFFile
586        from time import sleep
587
588        #Get NetCDF
589        retries = 0
590        file_open = False
591        while not file_open and retries < 10:
592            try:
593                fid = NetCDFFile(self.filename, 'a') #Open existing file
594            except IOError:
595                #This could happen if someone was reading the file.
596                #In that case, wait a while and try again
597                msg = 'Warning (store_timestep): File %s could not be opened'\
598                          %self.filename
599                msg += ' - trying again'
600                print msg
601                retries += 1
602                sleep(1)
603            else:
604                file_open = True
605
        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise msg
609
610
611        domain = self.domain
612
613        # Get the variables
614        time = fid.variables['time']
615        stage = fid.variables['stage']
616        i = len(time)
617
618        #Store stage
619        time[i] = self.domain.time
620
621        # Get quantity
622        Q = domain.quantities[name]
623        A,V = Q.get_vertex_values(xy=False,
624                                  precision = self.precision)
625
626        stage[i,:] = A.astype(self.precision)
627
628        #Flush and close
629        fid.sync()
630        fid.close()
631
632
633
634
635
636#Function for storing xya output
637#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface to ASCII format (.xya) for storing model output
    """
641
642
643    def __init__(self, domain, mode = 'w'):
644        from Scientific.IO.NetCDF import NetCDFFile
645        from Numeric import Int, Float, Float32
646
647        self.precision = Float32 #Use single precision
648
649        Data_format.__init__(self, domain, 'xya', mode)
650
651
652
653    #FIXME -This is the old xya format
654    def store_all(self):
655        """Specialisation of store all for xya format
656
657        Writes x,y,z coordinates of triangles constituting
658        the bed elevation.
659        """
660
661        from Numeric import concatenate
662
663        domain = self.domain
664
665        fd = open(self.filename, 'w')
666
667
668        if domain.smooth is True:
669            number_of_points =  len(domain.vertexlist)
670        else:
671            number_of_points = 3*self.number_of_volumes
672
673        numVertAttrib = 3 #Three attributes is what is assumed by the xya format
674
675        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
676                 " # <vertex #> <x> <y> [attributes]" + "\n")
677
678
679        # Get X, Y, bed elevation and friction (index=0,1)
680        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
681                                           indices = (0,1), precision = self.precision)
682
683        bed_eles = A[:,0]
684        fricts = A[:,1]
685
686        # Get stage (index=0)
687        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
688                                       indices = (0,), precision = self.precision)
689
690        stages = B[:,0]
691
692        #<vertex #> <x> <y> [attributes]
693        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
694                                               stages, fricts):
695
696            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
697            fd.write(s)
698
699        #close
700        fd.close()
701
702
703    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momenta) to file
705        """
706        pass
707
708
709#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
721    #print 'Writing obj to %s' % filename
722
723    import os.path
724
725    root, ext = os.path.splitext(filename)
726    if ext == '.obj':
727        FN = filename
728    else:
729        FN = filename + '.obj'
730
731
732    outfile = open(FN, 'wb')
733    outfile.write("# Triangulation as an obj file\n")
734
735    M, N = x.shape
736    assert N==3  #Assuming three vertices per element
737
738    for i in range(M):
739        for j in range(N):
740            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))
741
742    for i in range(M):
743        base = i*N
744        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))
745
746    outfile.close()
747
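def example_write_obj():
    """Write a single disconnected triangle to 'single_triangle.obj'.

    A minimal sketch of the (M,3) layout expected by write_obj;
    the filename and coordinates are illustrative only.
    """

    x = array([[0.0, 1.0, 0.0]])  #x coordinates of the three vertices
    y = array([[0.0, 0.0, 1.0]])  #y coordinates
    z = array([[0.0, 0.0, 0.0]])  #Flat triangle at elevation zero

    write_obj('single_triangle', x, y, z)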
748
749#########################################################
750#Conversion routines
751########################################################
752
753def sww2obj(basefilename, size):
754    """Convert netcdf based data output to obj
755    """
756    from Scientific.IO.NetCDF import NetCDFFile
757
758    from Numeric import Float, zeros
759
760    #Get NetCDF
761    FN = create_filename('.', basefilename, 'sww', size)
762    print 'Reading from ', FN
763    fid = NetCDFFile(FN, 'r')  #Open existing file for read
764
765
766    # Get the variables
767    x = fid.variables['x']
768    y = fid.variables['y']
769    z = fid.variables['elevation']
770    time = fid.variables['time']
771    stage = fid.variables['stage']
772
773    M = size  #Number of lines
774    xx = zeros((M,3), Float)
775    yy = zeros((M,3), Float)
776    zz = zeros((M,3), Float)
777
778    for i in range(M):
779        for j in range(3):
780            xx[i,j] = x[i+j*M]
781            yy[i,j] = y[i+j*M]
782            zz[i,j] = z[i+j*M]
783
784    #Write obj for bathymetry
785    FN = create_filename('.', basefilename, 'obj', size)
786    write_obj(FN,xx,yy,zz)
787
788
789    #Now read all the data with variable information, combine with
790    #x,y info and store as obj
791
792    for k in range(len(time)):
793        t = time[k]
794        print 'Processing timestep %f' %t
795
796        for i in range(M):
797            for j in range(3):
798                zz[i,j] = stage[k,i+j*M]
799
800
801        #Write obj for variable data
802        #FN = create_filename(basefilename, 'obj', size, time=t)
803        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
804        write_obj(FN,xx,yy,zz)
805
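def example_sww2obj():
    """Convert a hypothetical './runup_size100.sww' (100 volumes) to obj
    files, one for the bathymetry and one per timestep.

    A minimal sketch; the basename and size are illustrative only.
    """

    sww2obj('runup', 100)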
806
807def dat2obj(basefilename):
808    """Convert line based data output to obj
809    FIXME: Obsolete?
810    """
811
812    import glob, os
813    from config import data_dir
814
815
816    #Get bathymetry and x,y's
817    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()
818
819    from Numeric import zeros, Float
820
821    M = len(lines)  #Number of lines
822    x = zeros((M,3), Float)
823    y = zeros((M,3), Float)
824    z = zeros((M,3), Float)
825
826    ##i = 0
827    for i, line in enumerate(lines):
828        tokens = line.split()
829        values = map(float,tokens)
830
831        for j in range(3):
832            x[i,j] = values[j*3]
833            y[i,j] = values[j*3+1]
834            z[i,j] = values[j*3+2]
835
836        ##i += 1
837
838
839    #Write obj for bathymetry
840    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)
841
842
843    #Now read all the data files with variable information, combine with
844    #x,y info
845    #and store as obj
846
847    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')
848
849    for filename in files:
850        print 'Processing %s' % filename
851
852        lines = open(data_dir+os.sep+filename,'r').readlines()
853        assert len(lines) == M
854        root, ext = os.path.splitext(filename)
855
856        #Get time from filename
857        i0 = filename.find('_time=')
858        if i0 == -1:
859            #Skip bathymetry file
860            continue
861
862        i0 += 6  #Position where time starts
863        i1 = filename.find('.dat')
864
865        if i1 > i0:
866            t = float(filename[i0:i1])
867        else:
868            raise 'Hmmmm'
869
870
871
872        ##i = 0
873        for i, line in enumerate(lines):
874            tokens = line.split()
875            values = map(float,tokens)
876
877            for j in range(3):
878                z[i,j] = values[j]
879
880            ##i += 1
881
882        #Write obj for variable data
883        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
884
885
def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
890    from Scientific.IO.NetCDF import NetCDFFile
891
892    #Get NetCDF
893    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
894    outfile = NetCDFFile(filename2, 'w')  #Open new file
895
896
897    #Copy dimensions
898    for d in infile.dimensions:
899        outfile.createDimension(d, infile.dimensions[d])
900
901    for name in infile.variables:
902        var = infile.variables[name]
903        outfile.createVariable(name, var.typecode(), var.dimensions)
904
905
906    #Copy the static variables
907    for name in infile.variables:
908        if name == 'time' or name == 'stage':
909            pass
910        else:
911            #Copy
912            outfile.variables[name][:] = infile.variables[name][:]
913
914    #Copy selected timesteps
915    time = infile.variables['time']
916    stage = infile.variables['stage']
917
918    newtime = outfile.variables['time']
919    newstage = outfile.variables['stage']
920
921    if last is None:
922        last = len(time)
923
924    selection = range(first, last, step)
925    for i, j in enumerate(selection):
926        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
927        newtime[i] = time[j]
928        newstage[i,:] = stage[j,:]
929
930    #Close
931    infile.close()
932    outfile.close()
933
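def example_filter_netcdf():
    """Keep every second timestep of a hypothetical 'run.sww' in a new
    file 'run_thinned.sww'.

    A minimal sketch; both filenames are illustrative only.
    """

    filter_netcdf('run.sww', 'run_thinned.sww', first=0, step=2)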
934
935#Get data objects
936def get_dataobject(domain, mode='w'):
937    """Return instance of class of given format using filename
938    """
939
940    cls = eval('Data_format_%s' %domain.format)
941    return cls(domain, mode)
942
943def xya2pts(basename_in, basename_out=None, verbose=False,
944            #easting_min=None, easting_max=None,
945            #northing_min=None, northing_max=None,
946            stride = 1,
947            attribute_name = 'elevation',
948            z_func = None):
949    """Read points data from ascii (.xya)
950
951    Example:
952
953              x(m)        y(m)        z(m)
954         0.00000e+00  0.00000e+00  1.3535000e-01
955         0.00000e+00  1.40000e-02  1.3535000e-01
956
957
958
959    Convert to NetCDF pts format which is
960
961    points:  (Nx2) Float array
962    elevation: N Float array
963
964    Only lines that contain three numeric values are processed
965
966    If z_func is specified, it will be applied to the third field
967    """
968
969    import os
970    #from Scientific.IO.NetCDF import NetCDFFile
971    from Numeric import Float, arrayrange, concatenate
972
973    root, ext = os.path.splitext(basename_in)
974
975    if ext == '': ext = '.xya'
976
    #Get ASCII file
978    infile = open(root + ext, 'r')  #Open existing xya file for read
979
980    if verbose: print 'Reading xya points from %s' %(root + ext)
981
982    points = []
983    attribute = []
984    for i, line in enumerate(infile.readlines()):
985
986        if i % stride != 0: continue
987
988        fields = line.split()
989
        if len(fields) != 3:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)
994
995        try:
996            x = float( fields[0] )
997            y = float( fields[1] )
998            z = float( fields[2] )
999        except:
1000            continue
1001
1002        points.append( [x, y] )
1003
1004        if callable(z_func):
1005            attribute.append(z_func(z))
1006        else:
1007            attribute.append(z)
1008
1009
1010    #Get output file
    if basename_out is None:
1012        ptsname = root + '.pts'
1013    else:
1014        ptsname = basename_out + '.pts'
1015
1016    if verbose: print 'Store to NetCDF file %s' %ptsname
1017    write_ptsfile(ptsname, points, attribute, attribute_name)
1018
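#Example use of the z_func hook described above (a sketch; the basename
#'bathymetry' is hypothetical and depths are negated into elevations):
#
#    xya2pts('bathymetry', z_func=lambda z: -z, verbose=True)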
1019
1020
1021######Obsoleted by export_points in load_mesh
1022def write_ptsfile(ptsname, points, attribute, attribute_name = None,
1023                  zone=None, xllcorner=None, yllcorner=None):
1024    """Write points and associated attribute to pts (NetCDF) format
1025    """
1026
1027    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'
1028
1029    from Numeric import Float
1030
1031    if attribute_name is None:
1032        attribute_name = 'attribute'
1033
1034
1035    from Scientific.IO.NetCDF import NetCDFFile
1036
1037    # NetCDF file definition
1038    outfile = NetCDFFile(ptsname, 'w')
1039
1040
1041    #Create new file
1042    outfile.institution = 'Geoscience Australia'
1043    outfile.description = 'NetCDF pts format for compact and '\
1044                          'portable storage of spatial point data'
1045
1046
1047    #Georeferencing
1048    from coordinate_transforms.geo_reference import Geo_reference
1049    if zone is None:
1050        assert xllcorner is None, 'xllcorner must be None'
1051        assert yllcorner is None, 'yllcorner must be None'       
1052        Geo_reference().write_NetCDF(outfile)
1053    else:
1054        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)       
1055       
1056
1057
1058    outfile.createDimension('number_of_points', len(points))
1059    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1060
1061    # variable definitions
1062    outfile.createVariable('points', Float, ('number_of_points',
1063                                             'number_of_dimensions'))
1064    outfile.createVariable(attribute_name, Float, ('number_of_points',))
1065
1066    # Get handles to the variables
1067    nc_points = outfile.variables['points']
1068    nc_attribute = outfile.variables[attribute_name]
1069
1070    #Store data
1071    nc_points[:, :] = points
1072    nc_attribute[:] = attribute
1073
1074    outfile.close()
1075
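def example_write_ptsfile():
    """Store three georeferenced points (UTM zone 56) to 'three.pts'.

    A minimal sketch of the (obsolete) writer above; all values are
    illustrative only.
    """

    points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]]
    write_ptsfile('three.pts', points, [10.0, 20.0, 30.0],
                  attribute_name = 'elevation',
                  zone = 56, xllcorner = 308500.0, yllcorner = 6189000.0)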
1076
1077def dem2pts(basename_in, basename_out=None, 
1078            easting_min=None, easting_max=None,
1079            northing_min=None, northing_max=None,
1080            use_cache=False, verbose=False,):
    """Read Digital Elevation model from the following NetCDF format (.dem)
1082
1083    Example:
1084
1085    ncols         3121
1086    nrows         1800
1087    xllcorner     722000
1088    yllcorner     5893000
1089    cellsize      25
1090    NODATA_value  -9999
1091    138.3698 137.4194 136.5062 135.5558 ..........
1092
1093    Convert to NetCDF pts format which is
1094
1095    points:  (Nx2) Float array
1096    elevation: N Float array
1097    """
1098
1099
1100
1101    kwargs = {'basename_out': basename_out, 
1102              'easting_min': easting_min,
1103              'easting_max': easting_max,
1104              'northing_min': northing_min,
1105              'northing_max': northing_max,
1106              'verbose': verbose}
1107
1108    if use_cache is True:
1109        from caching import cache
1110        result = cache(_dem2pts, basename_in, kwargs,
1111                       dependencies = [basename_in + '.dem'],
1112                       verbose = verbose)
1113
1114    else:
1115        result = apply(_dem2pts, [basename_in], kwargs)
1116       
1117    return result
1118
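def example_dem2pts():
    """Convert a hypothetical 'landscape.dem' to 'landscape.pts',
    clipped to a one kilometre square.

    A minimal sketch; the basename and bounds are illustrative only.
    """

    dem2pts('landscape',
            easting_min = 722000, easting_max = 723000,
            northing_min = 5893000, northing_max = 5894000,
            verbose = True)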
1119
1120def _dem2pts(basename_in, basename_out=None, verbose=False,
1121            easting_min=None, easting_max=None,
1122            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)
1124
1125    Internal function. See public function dem2pts for details.   
1126    """
1127
1128    #FIXME: Can this be written feasibly using write_pts?
1129
1130    import os
1131    from Scientific.IO.NetCDF import NetCDFFile
1132    from Numeric import Float, zeros, reshape, sum
1133
1134    root = basename_in
1135
1136    #Get NetCDF
1137    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read
1138
1139    if verbose: print 'Reading DEM from %s' %(root + '.dem')
1140
1141    ncols = infile.ncols[0]
1142    nrows = infile.nrows[0]
1143    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
1144    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
1145    cellsize = infile.cellsize[0]
1146    NODATA_value = infile.NODATA_value[0]
1147    dem_elevation = infile.variables['elevation']
1148
1149    zone = infile.zone[0]
1150    false_easting = infile.false_easting[0]
1151    false_northing = infile.false_northing[0]
1152
1153    #Text strings
1154    projection = infile.projection
1155    datum = infile.datum
1156    units = infile.units
1157
1158
1159    #Get output file
    if basename_out is None:
1161        ptsname = root + '.pts'
1162    else:
1163        ptsname = basename_out + '.pts'
1164
1165    if verbose: print 'Store to NetCDF file %s' %ptsname
1166    # NetCDF file definition
1167    outfile = NetCDFFile(ptsname, 'w')
1168
1169    #Create new file
1170    outfile.institution = 'Geoscience Australia'
1171    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
1172                      'of spatial point data'
1173    #assign default values
1174    if easting_min is None: easting_min = xllcorner
1175    if easting_max is None: easting_max = xllcorner + ncols*cellsize
1176    if northing_min is None: northing_min = yllcorner
1177    if northing_max is None: northing_max = yllcorner + nrows*cellsize
1178
1179    #compute offsets to update georeferencing
1180    easting_offset = xllcorner - easting_min
1181    northing_offset = yllcorner - northing_min
1182
1183    #Georeferencing
1184    outfile.zone = zone
1185    outfile.xllcorner = easting_min #Easting of lower left corner
1186    outfile.yllcorner = northing_min #Northing of lower left corner
1187    outfile.false_easting = false_easting
1188    outfile.false_northing = false_northing
1189
1190    outfile.projection = projection
1191    outfile.datum = datum
1192    outfile.units = units
1193
1194
1195    #Grid info (FIXME: probably not going to be used, but heck)
1196    outfile.ncols = ncols
1197    outfile.nrows = nrows
1198
1199    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
1200    totalnopoints = nrows*ncols
1201
1202    # calculating number of NODATA_values for each row in clipped region
1203    #FIXME: use array operations to do faster
1204    nn = 0
1205    k = 0
1206    i1_0 = 0
1207    j1_0 = 0
1208    for i in range(nrows):
1209        y = (nrows-i)*cellsize + yllcorner
1210        for j in range(ncols):
1211            x = j*cellsize + xllcorner
1212            if easting_min <= x <= easting_max and \
1213               northing_min <= y <= northing_max:
1214                if dem_elevation_r[i,j] == NODATA_value: nn += 1
1215
1216                if k == 0:
1217                    i1_0 = i
1218                    j1_0 = j
1219                k += 1   
1220
1221            index1 = j1_0
1222            index2 = j
1223            index3 = i1_0
1224            index4 = i
1225
1226
1227    index2 += 1
1228    nrows2 = index4 - index3
1229    ncols2 = index2 - index1 + 1
1230   
1231    # dimension definitions
1232    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
1233    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
1234   
1235    clippednopoints = nrows_in_bounding_box*ncols_in_bounding_box
1236    nopoints = clippednopoints-nn
1237
1238    if verbose and nn > 0:
1239        print 'There are %d values in the elevation' %totalnopoints
1240        #print 'There are %d values in the clipped elevation' %clippednopoints
1241        print 'There are %d NODATA_values in the clipped elevation' %nn
1242       
1243    outfile.createDimension('number_of_points', nopoints)
1244    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1245
1246    # variable definitions
1247    outfile.createVariable('points', Float, ('number_of_points',
1248                                             'number_of_dimensions'))
1249    outfile.createVariable('elevation', Float, ('number_of_points',))
1250
1251    # Get handles to the variables
1252    points = outfile.variables['points']
1253    elevation = outfile.variables['elevation']
1254
1255   
1256    #Store data
1257    global_index = 0
1258    for i in range(nrows):
1259        if verbose and i%((nrows+10)/10)==0:
1260            print 'Processing row %d of %d' %(i, nrows)
1261
1262        lower_index = global_index
1263
1264        v = dem_elevation_r[i,index1:index2]
1265        no_NODATA = sum(v == NODATA_value)
1266        if no_NODATA > 0:
1267            newcols = len(v) - no_NODATA #ncols_in_bounding_box - no_NODATA
1268        else:
1269            newcols = len(v) #ncols_in_bounding_box
1270
1271        #print 'here', len(v), no_NODATA, newcols, v
1272        #print 'here again', dem_elevation_r[i,:]
1273        telev = zeros(newcols, Float)
1274        tpoints = zeros((newcols, 2), Float)
1275
1276        local_index = 0
1277       
1278        y = (nrows-i)*cellsize + yllcorner
1279        for j in range(ncols):
1280
1281            x = j*cellsize + xllcorner
1282            if easting_min <= x <= easting_max and \
1283               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
1285                tpoints[local_index, :] = [x-easting_min,y-northing_min]
1286                telev[local_index] = dem_elevation_r[i, j]
1287                global_index += 1
1288                local_index += 1
1289               
1290        upper_index = global_index
1291
1292        if upper_index == lower_index + newcols: 
1293            points[lower_index:upper_index, :] = tpoints
1294            elevation[lower_index:upper_index] = telev
1295
1296    assert global_index == nopoints, 'index not equal to number of points'
1297
1298    infile.close()
1299    outfile.close()
1300
1301
1302
1303def _read_hecras_cross_sections(lines):
    """Yield a block of surface points for each cross section.
    Each block starts after SURFACE LINE and
    ends with END CROSS-SECTION.
    """
1308
1309    points = []
1310
1311    reading_surface = False
1312    for i, line in enumerate(lines):
1313
1314        if len(line.strip()) == 0:    #Ignore blanks
1315            continue
1316
1317        if lines[i].strip().startswith('SURFACE LINE'):
1318            reading_surface = True
1319            continue
1320
1321        if lines[i].strip().startswith('END') and reading_surface:
1322            yield points
1323            reading_surface = False
1324            points = []
1325
1326        if reading_surface:
1327            fields = line.strip().split(',')
1328            easting = float(fields[0])
1329            northing = float(fields[1])
1330            elevation = float(fields[2])
1331            points.append([easting, northing, elevation])
1332
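#The generator above is consumed like this by hecras_cross_sections2pts
#below (a sketch; lines is the list of file lines after the header):
#
#    for block in _read_hecras_cross_sections(lines):
#        for easting, northing, elevation in block:
#            ...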
1333
1334
1335
1336def hecras_cross_sections2pts(basename_in,
1337                              basename_out=None,
1338                              verbose=False):
    """Read HEC-RAS elevation data from the following ASCII format (.sdf)
1340
1341    Example:
1342
1343
1344# RAS export file created on Mon 15Aug2005 11:42
1345# by HEC-RAS Version 3.1.1
1346
1347BEGIN HEADER:
1348  UNITS: METRIC
1349  DTM TYPE: TIN
1350  DTM: v:\1\cit\perth_topo\river_tin
1351  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1352  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1353  MAP PROJECTION: UTM
1354  PROJECTION ZONE: 50
1355  DATUM: AGD66
1356  VERTICAL DATUM:
1357  NUMBER OF REACHES:  19
1358  NUMBER OF CROSS-SECTIONS:  14206
1359END HEADER:
1360
1361
1362Only the SURFACE LINE data of the following form will be utilised
1363
1364  CROSS-SECTION:
1365    STREAM ID:Southern-Wungong
1366    REACH ID:Southern-Wungong
1367    STATION:19040.*
1368    CUT LINE:
1369      405548.671603161 , 6438142.7594925
1370      405734.536092045 , 6438326.10404912
1371      405745.130459356 , 6438331.48627354
1372      405813.89633823 , 6438368.6272789
1373    SURFACE LINE:
1374     405548.67,   6438142.76,   35.37
1375     405552.24,   6438146.28,   35.41
1376     405554.78,   6438148.78,   35.44
1377     405555.80,   6438149.79,   35.44
1378     405559.37,   6438153.31,   35.45
1379     405560.88,   6438154.81,   35.44
1380     405562.93,   6438156.83,   35.42
1381     405566.50,   6438160.35,   35.38
1382     405566.99,   6438160.83,   35.37
1383     ...
1384   END CROSS-SECTION
1385
1386    Convert to NetCDF pts format which is
1387
1388    points:  (Nx2) Float array
1389    elevation: N Float array
1390    """
1391
1392    #FIXME: Can this be written feasibly using write_pts?
1393
1394    import os
1395    from Scientific.IO.NetCDF import NetCDFFile
1396    from Numeric import Float, zeros, reshape
1397
1398    root = basename_in
1399
1400    #Get ASCII file
1401    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1402
1403    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1404
1405    lines = infile.readlines()
1406    infile.close()
1407
1408    if verbose: print 'Converting to pts format'
1409
1410    i = 0
1411    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1412        i += 1
1413
1414    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1415    i += 1
1416
1417    assert lines[i].strip().upper().startswith('UNITS:')
1418    units = lines[i].strip().split()[1]
1419    i += 1
1420
1421    assert lines[i].strip().upper().startswith('DTM TYPE:')
1422    i += 1
1423
1424    assert lines[i].strip().upper().startswith('DTM:')
1425    i += 1
1426
1427    assert lines[i].strip().upper().startswith('STREAM')
1428    i += 1
1429
1430    assert lines[i].strip().upper().startswith('CROSS')
1431    i += 1
1432
1433    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1434    projection = lines[i].strip().split(':')[1]
1435    i += 1
1436
1437    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1438    zone = int(lines[i].strip().split(':')[1])
1439    i += 1
1440
1441    assert lines[i].strip().upper().startswith('DATUM:')
1442    datum = lines[i].strip().split(':')[1]
1443    i += 1
1444
1445    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1446    i += 1
1447
1448    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1449    i += 1
1450
1451    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1452    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1453    i += 1
1454
1455
1456    #Now read all points
1457    points = []
1458    elevation = []
1459    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1460        for k, entry in enumerate(entries):
1461            points.append(entry[:2])
1462            elevation.append(entry[2])
1463
1464
    msg = 'Actual number of cross sections == %d, reported as %d'\
          %(j+1, number_of_cross_sections)
1467    assert j+1 == number_of_cross_sections, msg
1468
1469    #Get output file
    if basename_out is None:
1471        ptsname = root + '.pts'
1472    else:
1473        ptsname = basename_out + '.pts'
1474
1475    #FIXME (DSG-ON): use loadASCII export_points_file
1476    if verbose: print 'Store to NetCDF file %s' %ptsname
1477    # NetCDF file definition
1478    outfile = NetCDFFile(ptsname, 'w')
1479
1480    #Create new file
1481    outfile.institution = 'Geoscience Australia'
1482    outfile.description = 'NetCDF pts format for compact and portable ' +\
1483                          'storage of spatial point data derived from HEC-RAS'
1484
1485    #Georeferencing
1486    outfile.zone = zone
1487    outfile.xllcorner = 0.0
1488    outfile.yllcorner = 0.0
1489    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1490    outfile.false_northing = 1000000
1491
1492    outfile.projection = projection
1493    outfile.datum = datum
1494    outfile.units = units
1495
1496
1497    outfile.createDimension('number_of_points', len(points))
1498    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1499
1500    # variable definitions
1501    outfile.createVariable('points', Float, ('number_of_points',
1502                                             'number_of_dimensions'))
1503    outfile.createVariable('elevation', Float, ('number_of_points',))
1504
1505    # Get handles to the variables
1506    outfile.variables['points'][:] = points
1507    outfile.variables['elevation'][:] = elevation
1508
1509    outfile.close()
1510
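def example_hecras2pts():
    """Convert a hypothetical HEC-RAS export 'perth.sdf' to 'perth.pts'.

    A minimal sketch of the conversion described above.
    """

    hecras_cross_sections2pts('perth', verbose = True)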
1511
1512
1513def sww2dem(basename_in, basename_out = None,
1514            quantity = None,
1515            timestep = None,
1516            reduction = None,
1517            cellsize = 10,
1518            NODATA_value = -9999,
1519            easting_min = None,
1520            easting_max = None,
1521            northing_min = None,
1522            northing_max = None,
1523            expand_search = False, #To avoid intractable situations (This will be fixed when least_squares gets redesigned)
1524            verbose = False,
1525            origin = None,
1526            datum = 'WGS84',
1527            format = 'ers'):
1528
    """Read SWW file and convert to Digital Elevation model format (.asc or .ers)
1530
1531    Example (ASC):
1532
1533    ncols         3121
1534    nrows         1800
1535    xllcorner     722000
1536    yllcorner     5893000
1537    cellsize      25
1538    NODATA_value  -9999
1539    138.3698 137.4194 136.5062 135.5558 ..........
1540
1541    Also write accompanying file with same basename_in but extension .prj
1542    used to fix the UTM zone, datum, false northings and eastings.
1543
1544    The prj format is assumed to be as
1545
1546    Projection    UTM
1547    Zone          56
1548    Datum         WGS84
1549    Zunits        NO
1550    Units         METERS
1551    Spheroid      WGS84
1552    Xshift        0.0000000000
1553    Yshift        10000000.0000000000
1554    Parameters
1555
1556
1557    The parameter quantity must be the name of an existing quantity or
1558    an expression involving existing quantities. The default is
1559    'elevation'.
1560
    If timestep (an index) is given, output quantity at that timestep.

    If reduction is given, use that to reduce quantity over all timesteps.

    datum is the datum name written to the output georeferencing
    (default 'WGS84').

    format can be either 'asc' or 'ers'
    """
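
    #Example call (a sketch; the basename and quantity expression are
    #illustrative only):
    #
    #    sww2dem('runup', quantity = 'stage-elevation',
    #            cellsize = 5, reduction = max, format = 'asc')
    #
    #This writes the maximum water depth over all timesteps to
    #runup_stage-elevation.asc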
1569
1570    import sys
1571    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1572    from Numeric import array2string
1573
1574    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1575    from util import apply_expression_to_dictionary
1576
1577    msg = 'Format must be either asc or ers'
1578    assert format.lower() in ['asc', 'ers'], msg
1579
1580
1581    false_easting = 500000
1582    false_northing = 10000000
1583
1584    if quantity is None:
1585        quantity = 'elevation'
1586
1587    if reduction is None:
1588        reduction = max
1589
1590    if basename_out is None:
1591        basename_out = basename_in + '_%s' %quantity
1592
1593    swwfile = basename_in + '.sww'
1594    demfile = basename_out + '.' + format
1595    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option)
1597
1598    #Read sww file
1599    if verbose: print 'Reading from %s' %swwfile
1600    from Scientific.IO.NetCDF import NetCDFFile
1601    fid = NetCDFFile(swwfile)
1602
1603    #Get extent and reference
1604    x = fid.variables['x'][:]
1605    y = fid.variables['y'][:]
1606    volumes = fid.variables['volumes'][:]
1607
1608    number_of_timesteps = fid.dimensions['number_of_timesteps']
1609    number_of_points = fid.dimensions['number_of_points']
1610    if origin is None:
1611
1612        #Get geo_reference
1613        #sww files don't have to have a geo_ref
1614        try:
1615            geo_reference = Geo_reference(NetCDFObject=fid)
1616        except AttributeError, e:
1617            geo_reference = Geo_reference() #Default georef object
1618
1619        xllcorner = geo_reference.get_xllcorner()
1620        yllcorner = geo_reference.get_yllcorner()
1621        zone = geo_reference.get_zone()
1622    else:
1623        zone = origin[0]
1624        xllcorner = origin[1]
1625        yllcorner = origin[2]
1626
1627
1628
1629    #FIXME: Refactor using code from file_function.statistics
1630    #Something like print swwstats(swwname)
1631    if verbose:
1632        x = fid.variables['x'][:]
1633        y = fid.variables['y'][:]
1634        times = fid.variables['time'][:]
1635        print '------------------------------------------------'
1636        print 'Statistics of SWW file:'
1637        print '  Name: %s' %swwfile
1638        print '  Reference:'
1639        print '    Lower left corner: [%f, %f]'\
1640              %(xllcorner, yllcorner)
1641        print '    Start time: %f' %fid.starttime[0]
1642        print '  Extent:'
1643        print '    x [m] in [%f, %f], len(x) == %d'\
1644              %(min(x.flat), max(x.flat), len(x.flat))
1645        print '    y [m] in [%f, %f], len(y) == %d'\
1646              %(min(y.flat), max(y.flat), len(y.flat))
1647        print '    t [s] in [%f, %f], len(t) == %d'\
1648              %(min(times), max(times), len(times))
1649        print '  Quantities [SI units]:'
1650        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
1651            q = fid.variables[name][:].flat
1652            print '    %s in [%f, %f]' %(name, min(q), max(q))
1653
1654
1655
1656
1657
1658    #Get quantity and reduce if applicable
1659    if verbose: print 'Processing quantity %s' %quantity
1660
1661    #Turn NetCDF objects into Numeric arrays
1662    quantity_dict = {}
1663    for name in fid.variables.keys():
1664        quantity_dict[name] = fid.variables[name][:]
1665
1666
1667    q = apply_expression_to_dictionary(quantity, quantity_dict)
1668
1669
1670
1671    if len(q.shape) == 2:
1672        #q has a time component and needs to be reduced along
1673        #the temporal dimension
1674        if verbose: print 'Reducing quantity %s' %quantity
1675        q_reduced = zeros( number_of_points, Float )
1676
1677        for k in range(number_of_points):
1678            q_reduced[k] = reduction( q[:,k] )
1679
1680        q = q_reduced
1681
1682    #Post condition: Now q has dimension: number_of_points
1683    assert len(q.shape) == 1
1684    assert q.shape[0] == number_of_points
1685
1686
1687    if verbose:
1688        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1689
1690
1691    #Create grid and update xll/yll corner and x,y
1692
1693    #Relative extent
1694    if easting_min is None:
1695        xmin = min(x)
1696    else:
1697        xmin = easting_min - xllcorner
1698
1699    if easting_max is None:
1700        xmax = max(x)
1701    else:
1702        xmax = easting_max - xllcorner
1703
1704    if northing_min is None:
1705        ymin = min(y)
1706    else:
1707        ymin = northing_min - yllcorner
1708
1709    if northing_max is None:
1710        ymax = max(y)
1711    else:
1712        ymax = northing_max - yllcorner
1713
1714
1715
1716    if verbose: print 'Creating grid'
1717    ncols = int((xmax-xmin)/cellsize)+1
1718    nrows = int((ymax-ymin)/cellsize)+1
1719
1720
1721    #New absolute reference and coordinates
1722    newxllcorner = xmin+xllcorner
1723    newyllcorner = ymin+yllcorner
1724
1725    x = x+xllcorner-newxllcorner
1726    y = y+yllcorner-newyllcorner
1727
1728    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1729    assert len(vertex_points.shape) == 2
1730
1731
1732
1733    grid_points = zeros ( (ncols*nrows, 2), Float )
1734
1735
1736    for i in xrange(nrows):
1737        if format.lower() == 'asc':
1738            yg = i*cellsize
1739        else:
1740            #this will flip the order of the y values for ers
1741            yg = (nrows-i)*cellsize
1742
1743        for j in xrange(ncols):
1744            xg = j*cellsize
1745            k = i*ncols + j
1746
1747            grid_points[k,0] = xg
1748            grid_points[k,1] = yg
1749
1750    #Interpolate
1751    from least_squares import Interpolation
1752
1753
1754    #FIXME: This should be done with precrop = True (?), otherwise it'll
1755    #take forever. With expand_search  set to False, some grid points might
    #miss out.... This will be addressed through Duncan's refactoring of least_squares
1757
1758    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
1759                           precrop = False,
1760                           expand_search = expand_search,
1761                           verbose = verbose)
1762
1763
1764
1765    #Interpolate using quantity values
1766    if verbose: print 'Interpolating'
1767    grid_values = interp.interpolate(q).flat
1768
1769
1770    if verbose:
1771        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
1772                                                      max(grid_values))
1773
1774    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
1775    P = interp.mesh.get_boundary_polygon()
1776    outside_indices = outside_polygon(grid_points, P, closed=True)
1777
1778    for i in outside_indices:
1779        grid_values[i] = NODATA_value
1780
1781
1782
1783
1784    if format.lower() == 'ers':
1785        # setup ERS header information
1786        grid_values = reshape(grid_values,(nrows, ncols))
1787        header = {}
1788        header['datum'] = '"' + datum + '"'
1789        # FIXME The use of hardwired UTM and zone number needs to be made optional
1790        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
1791        header['projection'] = '"UTM-' + str(zone) + '"'
1792        header['coordinatetype'] = 'EN'
1793        if header['coordinatetype'] == 'LL':
1794            header['longitude'] = str(newxllcorner)
1795            header['latitude'] = str(newyllcorner)
1796        elif header['coordinatetype'] == 'EN':
1797            header['eastings'] = str(newxllcorner)
1798            header['northings'] = str(newyllcorner)
1799        header['nullcellvalue'] = str(NODATA_value)
1800        header['xdimension'] = str(cellsize)
1801        header['ydimension'] = str(cellsize)
1802        header['value'] = '"' + quantity + '"'
1803        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
1804
1805
1806        #Write
1807        if verbose: print 'Writing %s' %demfile
1808        import ermapper_grids
1809        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
1810
1811        fid.close()
1812    else:
1813        #Write to Ascii format
1814
1815        #Write prj file
1816        prjfile = basename_out + '.prj'
1817
1818        if verbose: print 'Writing %s' %prjfile
1819        prjid = open(prjfile, 'w')
1820        prjid.write('Projection    %s\n' %'UTM')
1821        prjid.write('Zone          %d\n' %zone)
1822        prjid.write('Datum         %s\n' %datum)
1823        prjid.write('Zunits        NO\n')
1824        prjid.write('Units         METERS\n')
1825        prjid.write('Spheroid      %s\n' %datum)
1826        prjid.write('Xshift        %d\n' %false_easting)
1827        prjid.write('Yshift        %d\n' %false_northing)
1828        prjid.write('Parameters\n')
1829        prjid.close()
1830
1831
1832
1833        if verbose: print 'Writing %s' %demfile
1834
1835        ascid = open(demfile, 'w')
1836
1837        ascid.write('ncols         %d\n' %ncols)
1838        ascid.write('nrows         %d\n' %nrows)
1839        ascid.write('xllcorner     %d\n' %newxllcorner)
1840        ascid.write('yllcorner     %d\n' %newyllcorner)
1841        ascid.write('cellsize      %f\n' %cellsize)
1842        ascid.write('NODATA_value  %d\n' %NODATA_value)
1843
1844
1845        #Get bounding polygon from mesh
1846        #P = interp.mesh.get_boundary_polygon()
1847        #inside_indices = inside_polygon(grid_points, P)
1848
1849        for i in range(nrows):
1850            if verbose and i%((nrows+10)/10)==0:
1851                print 'Doing row %d of %d' %(i, nrows)
1852
1853            base_index = (nrows-i-1)*ncols
1854
1855            slice = grid_values[base_index:base_index+ncols]
1856            s = array2string(slice, max_line_width=sys.maxint)
1857            ascid.write(s[1:-1] + '\n')
1858
1859
1860            #print
1861            #for j in range(ncols):
1862            #    index = base_index+j#
1863            #    print grid_values[index],
1864            #    ascid.write('%f ' %grid_values[index])
1865            #ascid.write('\n')
1866
1867        #Close
1868        ascid.close()
1869        fid.close()
1870
1871#Backwards compatibility
1872def sww2asc(basename_in, basename_out = None,
1873            quantity = None,
1874            timestep = None,
1875            reduction = None,
1876            cellsize = 10,
1877            verbose = False,
1878            origin = None):
1879    print 'sww2asc is deprecated - please use sww2dem'
1880    sww2dem(basename_in,
1881            basename_out = basename_out,
1882            quantity = quantity,
1883            timestep = timestep,
1884            reduction = reduction,
1885            cellsize = cellsize,
1886            verbose = verbose,
1887            origin = origin,
1888            datum = 'WGS84',
1889            format = 'asc')
1890
1891def sww2ers(basename_in, basename_out = None,
1892            quantity = None,
1893            timestep = None,
1894            reduction = None,
1895            cellsize = 10,
1896            verbose = False,
1897            origin = None,
1898            datum = 'WGS84'):
1899    print 'sww2ers is deprecated - please use sww2dem'
1900    sww2dem(basename_in,
1901            basename_out = basename_out,
1902            quantity = quantity,
1903            timestep = timestep,
1904            reduction = reduction,
1905            cellsize = cellsize,
1906            verbose = verbose,
1907            origin = origin,
1908            datum = datum,
1909            format = 'ers')
1910################################# END COMPATIBILITY ##############
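#Example (a sketch, assuming 'bedslope.sww' exists): the deprecated call
#
#    sww2asc('bedslope', quantity = 'stage', cellsize = 10)
#
#is equivalent to
#
#    sww2dem('bedslope', quantity = 'stage', cellsize = 10,
#            datum = 'WGS84', format = 'asc')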
1911
1912
1913
1914def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1915                                  use_cache = False,
1916                                  verbose = False):
1917    """Read Digital Elevation Model from the following ASCII format (.asc)
1918
1919    Example:
1920
1921    ncols         3121
1922    nrows         1800
1923    xllcorner     722000
1924    yllcorner     5893000
1925    cellsize      25
1926    NODATA_value  -9999
1927    138.3698 137.4194 136.5062 135.5558 ..........
1928
1929    Convert basename_in + '.asc' to NetCDF format (.dem)
1930    mimicking the ASCII format closely.
1931
1932
1933    An accompanying file with same basename_in but extension .prj must exist
1934    and is used to fix the UTM zone, datum, false northings and eastings.
1935
1936    The prj format is assumed to be as
1937
1938    Projection    UTM
1939    Zone          56
1940    Datum         WGS84
1941    Zunits        NO
1942    Units         METERS
1943    Spheroid      WGS84
1944    Xshift        0.0000000000
1945    Yshift        10000000.0000000000
1946    Parameters
1947    """
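    #Example usage (a sketch; 'topo' is a hypothetical basename for which
    #topo.asc and topo.prj exist):
    #
    #    convert_dem_from_ascii2netcdf('topo', use_cache = True,
    #                                  verbose = True)
    #
    #This writes topo.dem in NetCDF format.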
1948
1949
1950
1951    kwargs = {'basename_out': basename_out, 'verbose': verbose}
1952
1953    if use_cache is True:
1954        from caching import cache
1955        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
1956                       dependencies = [basename_in + '.asc'],
1957                       verbose = verbose)
1958
1959    else:
1960        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
1961       
1962    return result
1963
1964   
1965   
1966
1967
1968
1969def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1970                                  verbose = False):
1971    """Read Digital Elevation Model from the following ASCII format (.asc)
1972
1973    Internal function. See public function convert_dem_from_ascii2netcdf for details.
1974    """
1975
1976    import os
1977    from Scientific.IO.NetCDF import NetCDFFile
1978    from Numeric import Float, array
1979
1980    #root, ext = os.path.splitext(basename_in)
1981    root = basename_in
1982
1983    ###########################################
1984    # Read Meta data
1985    if verbose: print 'Reading METADATA from %s' %root + '.prj'
1986    metadatafile = open(root + '.prj')
1987    metalines = metadatafile.readlines()
1988    metadatafile.close()
1989
1990    L = metalines[0].strip().split()
1991    assert L[0].strip().lower() == 'projection'
1992    projection = L[1].strip()                   #TEXT
1993
1994    L = metalines[1].strip().split()
1995    assert L[0].strip().lower() == 'zone'
1996    zone = int(L[1].strip())
1997
1998    L = metalines[2].strip().split()
1999    assert L[0].strip().lower() == 'datum'
2000    datum = L[1].strip()                        #TEXT
2001
2002    L = metalines[3].strip().split()
2003    assert L[0].strip().lower() == 'zunits'     #IGNORE
2004    zunits = L[1].strip()                       #TEXT
2005
2006    L = metalines[4].strip().split()
2007    assert L[0].strip().lower() == 'units'
2008    units = L[1].strip()                        #TEXT
2009
2010    L = metalines[5].strip().split()
2011    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2012    spheroid = L[1].strip()                     #TEXT
2013
2014    L = metalines[6].strip().split()
2015    assert L[0].strip().lower() == 'xshift'
2016    false_easting = float(L[1].strip())
2017
2018    L = metalines[7].strip().split()
2019    assert L[0].strip().lower() == 'yshift'
2020    false_northing = float(L[1].strip())
2021
2022    #print false_easting, false_northing, zone, datum
2023
2024
2025    ###########################################
2026    #Read DEM data
2027
2028    datafile = open(basename_in + '.asc')
2029
2030    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2031    lines = datafile.readlines()
2032    datafile.close()
2033
2034    if verbose: print 'Got', len(lines), 'lines'
2035
2036    ncols = int(lines[0].split()[1].strip())
2037    nrows = int(lines[1].split()[1].strip())
2038    xllcorner = float(lines[2].split()[1].strip())
2039    yllcorner = float(lines[3].split()[1].strip())
2040    cellsize = float(lines[4].split()[1].strip())
2041    NODATA_value = int(lines[5].split()[1].strip())
2042
2043    assert len(lines) == nrows + 6
2044
2045
2046    ##########################################
2047
2048
2049    if basename_out == None:
2050        netcdfname = root + '.dem'
2051    else:
2052        netcdfname = basename_out + '.dem'
2053
2054    if verbose: print 'Store to NetCDF file %s' %netcdfname
2055    # NetCDF file definition
2056    fid = NetCDFFile(netcdfname, 'w')
2057
2058    #Create new file
2059    fid.institution = 'Geoscience Australia'
2060    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2061                      'of spatial point data'
2062
2063    fid.ncols = ncols
2064    fid.nrows = nrows
2065    fid.xllcorner = xllcorner
2066    fid.yllcorner = yllcorner
2067    fid.cellsize = cellsize
2068    fid.NODATA_value = NODATA_value
2069
2070    fid.zone = zone
2071    fid.false_easting = false_easting
2072    fid.false_northing = false_northing
2073    fid.projection = projection
2074    fid.datum = datum
2075    fid.units = units
2076
2077
2078    # dimension definitions
2079    fid.createDimension('number_of_rows', nrows)
2080    fid.createDimension('number_of_columns', ncols)
2081
2082    # variable definitions
2083    fid.createVariable('elevation', Float, ('number_of_rows',
2084                                            'number_of_columns'))
2085
2086    # Get handles to the variables
2087    elevation = fid.variables['elevation']
2088
2089    #Store data
2090    n = len(lines[6:])
2091    for i, line in enumerate(lines[6:]):
2092        fields = line.split()
2093        if verbose and i%((n+10)/10)==0:
2094            print 'Processing row %d of %d' %(i, nrows)
2095
2096        elevation[i, :] = array([float(x) for x in fields])
2097
2098    fid.close()
2099
2100
2101
2102
2103
2104def ferret2sww(basename_in, basename_out = None,
2105               verbose = False,
2106               minlat = None, maxlat = None,
2107               minlon = None, maxlon = None,
2108               mint = None, maxt = None, mean_stage = 0,
2109               origin = None, zscale = 1,
2110               fail_on_NaN = True,
2111               NaN_filler = 0,
2112               elevation = None,
2113               inverted_bathymetry = False
2114               ): #FIXME: Bathymetry should be obtained
2115                                  #from MOST somehow.
2116                                  #Alternatively from elsewhere
2117                                  #or, as a last resort,
2118                                  #specified here.
2119                                  #The value of -100 will work
2120                                  #for the Wollongong tsunami
2121                                  #scenario but is very hacky
2122    """Convert 'Ferret' NetCDF format for wave propagation to
2123    sww format native to pyvolution.
2124
2125    Specify only basename_in and read files of the form
2126    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2127    relative height, x-velocity and y-velocity, respectively.
2128
2129    Also convert latitude and longitude to UTM. All coordinates are
2130    assumed to be given in the GDA94 datum.
2131
2132    min's and max's: If omitted - full extent is used.
2133    To include a value min may equal it, while max must exceed it.
2134    Lat and lon are assumed to be in decimal degrees
2135
2136    origin is a 3-tuple with geo referenced
2137    UTM coordinates (zone, easting, northing)
2138
2139    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2140    which means that longitude is the fastest
2141    varying dimension (row major order, so to speak)
2142
2143    ferret2sww uses grid points as vertices in a triangular grid
2144    counting vertices from lower left corner upwards, then right
2145    """
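    #Example usage (a sketch; 'most' is a hypothetical basename for which
    #most_ha.nc, most_ua.nc, most_va.nc and most_e.nc exist):
    #
    #    ferret2sww('most', minlat = -35.0, maxlat = -34.0,
    #               minlon = 150.0, maxlon = 151.5,
    #               mean_stage = 0.0, verbose = True)
    #
    #This writes most.sww, cropped to the given lat/lon window.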
2146
2147    import os
2148    from Scientific.IO.NetCDF import NetCDFFile
2149    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2150    from Numeric import allclose, around
2151
2152    precision = Float
2153
2154    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2155
2156    if minlat != None:
2157        assert -90 < minlat < 90 , msg
2158    if maxlat != None:
2159        assert -90 < maxlat < 90 , msg
2160    if minlon != None:
2161        assert -180 < minlon < 180 , msg
2162    if maxlon != None:
2163        assert -180 < maxlon < 180 , msg
2164 
2165
2166    #Get NetCDF data
2167    if verbose: print 'Reading files %s_*.nc' %basename_in
2168    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2169    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2170    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2171    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
2172
2173    if basename_out is None:
2174        swwname = basename_in + '.sww'
2175    else:
2176        swwname = basename_out + '.sww'
2177
2178    times = file_h.variables['TIME']
2179    latitudes = file_h.variables['LAT']
2180    longitudes = file_h.variables['LON']
2181
2182
2183
2184    #Precision used by MOST for lat/lon is 4 or 5 decimals
2185    e_lat = around(file_e.variables['LAT'][:], 5)
2186    e_lon = around(file_e.variables['LON'][:], 5)
2187
2188    #Check that files are compatible
2189    assert allclose(latitudes, file_u.variables['LAT'])
2190    assert allclose(latitudes, file_v.variables['LAT'])
2191    assert allclose(latitudes, e_lat)
2192
2193    assert allclose(longitudes, file_u.variables['LON'])
2194    assert allclose(longitudes, file_v.variables['LON'])
2195    assert allclose(longitudes, e_lon)
2196
2197
2198
2199    if mint == None:
2200        jmin = 0
2201    else:
2202        jmin = searchsorted(times, mint)
2203
2204    if maxt == None:
2205        jmax=len(times)
2206    else:
2207        jmax = searchsorted(times, maxt)
2208
2209    if minlat == None:
2210        kmin=0
2211    else:
2212        kmin = searchsorted(latitudes, minlat)
2213
2214    if maxlat == None:
2215        kmax = len(latitudes)
2216    else:
2217        kmax = searchsorted(latitudes, maxlat)
2218
2219    if minlon == None:
2220        lmin=0
2221    else:
2222        lmin = searchsorted(longitudes, minlon)
2223
2224    if maxlon == None:
2225        lmax = len(longitudes)
2226    else:
2227        lmax = searchsorted(longitudes, maxlon)
2228
2229
2230
2231    times = times[jmin:jmax]
2232    latitudes = latitudes[kmin:kmax]
2233    longitudes = longitudes[lmin:lmax]
2234
2235
2236    if verbose: print 'cropping'
2237    zname = 'ELEVATION'
2238
2239
2240    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2241    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2242    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2243    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2244
2245    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2246    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2247    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2248    #        from Numeric import asarray
2249    #        elevations=elevations.tolist()
2250    #        elevations.reverse()
2251    #        elevations=asarray(elevations)
2252    #    else:
2253    #        from Numeric import asarray
2254    #        elevations=elevations.tolist()
2255    #        elevations.reverse()
2256    #        elevations=asarray(elevations)
2257    #        'print hmmm'
2258
2259
2260
2261    #Get missing values
2262    nan_ha = file_h.variables['HA'].missing_value[0]
2263    nan_ua = file_u.variables['UA'].missing_value[0]
2264    nan_va = file_v.variables['VA'].missing_value[0]
2265    if hasattr(file_e.variables[zname],'missing_value'):
2266        nan_e  = file_e.variables[zname].missing_value[0]
2267    else:
2268        nan_e = None
2269
2270    #Cleanup
2271    from Numeric import sometrue
2272
2273    missing = (amplitudes == nan_ha)
2274    if sometrue (missing):
2275        if fail_on_NaN:
2276            msg = 'NetCDFFile %s contains missing values'\
2277                  %(basename_in+'_ha.nc')
2278            raise msg
2279        else:
2280            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2281
2282    missing = (uspeed == nan_ua)
2283    if sometrue (missing):
2284        if fail_on_NaN:
2285            msg = 'NetCDFFile %s contains missing values'\
2286                  %(basename_in+'_ua.nc')
2287            raise msg
2288        else:
2289            uspeed = uspeed*(missing==0) + missing*NaN_filler
2290
2291    missing = (vspeed == nan_va)
2292    if sometrue (missing):
2293        if fail_on_NaN:
2294            msg = 'NetCDFFile %s contains missing values'\
2295                  %(basename_in+'_va.nc')
2296            raise msg
2297        else:
2298            vspeed = vspeed*(missing==0) + missing*NaN_filler
2299
2300
2301    missing = (elevations == nan_e)
2302    if sometrue (missing):
2303        if fail_on_NaN:
2304            msg = 'NetCDFFile %s contains missing values'\
2305                  %(basename_in+'_e.nc')
2306            raise msg
2307        else:
2308            elevations = elevations*(missing==0) + missing*NaN_filler
2309
2310    #######
2311
2312
2313
2314    number_of_times = times.shape[0]
2315    number_of_latitudes = latitudes.shape[0]
2316    number_of_longitudes = longitudes.shape[0]
2317
2318    assert amplitudes.shape[0] == number_of_times
2319    assert amplitudes.shape[1] == number_of_latitudes
2320    assert amplitudes.shape[2] == number_of_longitudes
2321
2322    if verbose:
2323        print '------------------------------------------------'
2324        print 'Statistics:'
2325        print '  Extent (lat/lon):'
2326        print '    lat in [%f, %f], len(lat) == %d'\
2327              %(min(latitudes.flat), max(latitudes.flat),
2328                len(latitudes.flat))
2329        print '    lon in [%f, %f], len(lon) == %d'\
2330              %(min(longitudes.flat), max(longitudes.flat),
2331                len(longitudes.flat))
2332        print '    t in [%f, %f], len(t) == %d'\
2333              %(min(times.flat), max(times.flat), len(times.flat))
2334
2335        q = amplitudes.flat
2336        name = 'Amplitudes (ha) [cm]'
2337        print '    %s in [%f, %f]' %(name, min(q), max(q))
2338
2339        q = uspeed.flat
2340        name = 'Speeds (ua) [cm/s]'
2341        print '    %s in [%f, %f]' %(name, min(q), max(q))
2342
2343        q = vspeed.flat
2344        name = 'Speeds (va) [cm/s]'
2345        print '    %s in [%f, %f]' %(name, min(q), max(q))
2346
2347        q = elevations.flat
2348        name = 'Elevations (e) [m]'
2349        print '    %s in [%f, %f]' %(name, min(q), max(q))
2350
2351
2352    #print number_of_latitudes, number_of_longitudes
2353    number_of_points = number_of_latitudes*number_of_longitudes
2354    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2355
2356
2357    file_h.close()
2358    file_u.close()
2359    file_v.close()
2360    file_e.close()
2361
2362
2363    # NetCDF file definition
2364    outfile = NetCDFFile(swwname, 'w')
2365
2366    #Create new file
2367    outfile.institution = 'Geoscience Australia'
2368    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2369                          %(basename_in + '_ha.nc',
2370                            basename_in + '_ua.nc',
2371                            basename_in + '_va.nc',
2372                            basename_in + '_e.nc')
2373
2374
2375    #For sww compatibility
2376    outfile.smoothing = 'Yes'
2377    outfile.order = 1
2378
2379    #Start time in seconds since the epoch (midnight 1/1/1970)
2380    outfile.starttime = starttime = times[0]
2381    times = times - starttime  #Store relative times
2382
2383    # dimension definitions
2384    outfile.createDimension('number_of_volumes', number_of_volumes)
2385
2386    outfile.createDimension('number_of_vertices', 3)
2387    outfile.createDimension('number_of_points', number_of_points)
2388
2389
2391    outfile.createDimension('number_of_timesteps', len(times))
2392
2393    # variable definitions
2394    outfile.createVariable('x', precision, ('number_of_points',))
2395    outfile.createVariable('y', precision, ('number_of_points',))
2396    outfile.createVariable('elevation', precision, ('number_of_points',))
2397
2398    #FIXME: Backwards compatibility
2399    outfile.createVariable('z', precision, ('number_of_points',))
2400    #################################
2401
2402    outfile.createVariable('volumes', Int, ('number_of_volumes',
2403                                            'number_of_vertices'))
2404
2405    outfile.createVariable('time', precision,
2406                           ('number_of_timesteps',))
2407
2408    outfile.createVariable('stage', precision,
2409                           ('number_of_timesteps',
2410                            'number_of_points'))
2411
2412    outfile.createVariable('xmomentum', precision,
2413                           ('number_of_timesteps',
2414                            'number_of_points'))
2415
2416    outfile.createVariable('ymomentum', precision,
2417                           ('number_of_timesteps',
2418                            'number_of_points'))
2419
2420
2421    #Store
2422    from coordinate_transforms.redfearn import redfearn
2423    x = zeros(number_of_points, Float)  #Easting
2424    y = zeros(number_of_points, Float)  #Northing
2425
2426
2427    if verbose: print 'Making triangular grid'
2428    #Check zone boundaries
2429    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2430
2431    vertices = {}
2432    i = 0
2433    for k, lat in enumerate(latitudes):       #Y direction
2434        for l, lon in enumerate(longitudes):  #X direction
2435
2436            vertices[l,k] = i
2437
2438            zone, easting, northing = redfearn(lat,lon)
2439
2440            msg = 'Zone boundary crossed at longitude = %s' %lon
2441            #assert zone == refzone, msg
2442            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2443            x[i] = easting
2444            y[i] = northing
2445            i += 1
2446
2447
2448    #Construct 2 triangles per 'rectangular' element
2449    volumes = []
2450    for l in range(number_of_longitudes-1):    #X direction
2451        for k in range(number_of_latitudes-1): #Y direction
2452            v1 = vertices[l,k+1]
2453            v2 = vertices[l,k]
2454            v3 = vertices[l+1,k+1]
2455            v4 = vertices[l+1,k]
2456
2457            volumes.append([v1,v2,v3]) #Upper element
2458            volumes.append([v4,v3,v2]) #Lower element
2459
2460    volumes = array(volumes)
2461
2462    if origin == None:
2463        zone = refzone
2464        xllcorner = min(x)
2465        yllcorner = min(y)
2466    else:
2467        zone = origin[0]
2468        xllcorner = origin[1]
2469        yllcorner = origin[2]
2470
2471
2472    outfile.xllcorner = xllcorner
2473    outfile.yllcorner = yllcorner
2474    outfile.zone = zone
2475
2476
2477    if elevation is not None:
2478        z = elevation
2479    else:
2480        if inverted_bathymetry:
2481            z = -1*elevations
2482        else:
2483            z = elevations
2484        #FIXME: z should be obtained from MOST and passed in here
2485
2486    from Numeric import resize
2487    z = resize(z,outfile.variables['z'][:].shape)
2488    outfile.variables['x'][:] = x - xllcorner
2489    outfile.variables['y'][:] = y - yllcorner
2490    outfile.variables['z'][:] = z             #FIXME HACK
2491    outfile.variables['elevation'][:] = z
2492    outfile.variables['time'][:] = times   #Store time relative
2493    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2494
2495
2496
2497    #Time stepping
2498    stage = outfile.variables['stage']
2499    xmomentum = outfile.variables['xmomentum']
2500    ymomentum = outfile.variables['ymomentum']
2501
2502    if verbose: print 'Converting quantities'
2503    n = len(times)
2504    for j in range(n):
2505        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2506        i = 0
2507        for k in range(number_of_latitudes):      #Y direction
2508            for l in range(number_of_longitudes): #X direction
2509                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2510                stage[j,i] = w
2511                h = w - z[i]
2512                xmomentum[j,i] = uspeed[j,k,l]/100*h
2513                ymomentum[j,i] = vspeed[j,k,l]/100*h
2514                i += 1
2515
2516    #outfile.close()
2517
2518    #FIXME: Refactor using code from file_function.statistics
2519    #Something like print swwstats(swwname)
2520    if verbose:
2521        x = outfile.variables['x'][:]
2522        y = outfile.variables['y'][:]
2523        times = outfile.variables['time'][:]
2524        print '------------------------------------------------'
2525        print 'Statistics of output file:'
2526        print '  Name: %s' %swwname
2527        print '  Reference:'
2528        print '    Lower left corner: [%f, %f]'\
2529              %(xllcorner, yllcorner)
2530        print '    Start time: %f' %starttime
2531        print '  Extent:'
2532        print '    x [m] in [%f, %f], len(x) == %d'\
2533              %(min(x.flat), max(x.flat), len(x.flat))
2534        print '    y [m] in [%f, %f], len(y) == %d'\
2535              %(min(y.flat), max(y.flat), len(y.flat))
2536        print '    t [s] in [%f, %f], len(t) == %d'\
2537              %(min(times), max(times), len(times))
2538        print '  Quantities [SI units]:'
2539        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2540            q = outfile.variables[name][:].flat
2541            print '    %s in [%f, %f]' %(name, min(q), max(q))
2542
2543
2544
2545    outfile.close()
2546
2547
2548
2549
2550
2551def timefile2netcdf(filename, quantity_names = None):
2552    """Template for converting typical text files with time series to
2553    NetCDF tms file.
2554
2555
2556    The file format is assumed to be two fields separated by a comma:
2557
2558        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
2559
2560    E.g
2561
2562      31/08/04 00:00:00, 1.328223 0 0
2563      31/08/04 00:15:00, 1.292912 0 0
2564
2565    will provide a time dependent function f(t) with three attributes
2566
2567    filename is assumed to be the rootname with extensions .txt (input) and .tms (output)
2568    """
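    #Example usage (a sketch; 'gauge' is a hypothetical rootname for which
    #gauge.txt exists with lines as in the example above):
    #
    #    timefile2netcdf('gauge', quantity_names = ['stage',
    #                                               'xmomentum',
    #                                               'ymomentum'])
    #
    #This writes gauge.tms with one named time series per column.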
2569
2570    import time, calendar
2571    from Numeric import array
2572    from config import time_format
2573    from utilities.numerical_tools import ensure_numeric   
2574
2575
2576
2577    fid = open(filename + '.txt')
2578    line = fid.readline()
2579    fid.close()
2580
2581    fields = line.split(',')
2582    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
2583    assert len(fields) == 2, msg
2584
2585    try:
2586        starttime = calendar.timegm(time.strptime(fields[0], time_format))
2587    except ValueError:
2588        msg = 'First field in file %s must be' %filename
2589        msg += ' date-time with format %s.\n' %time_format
2590        msg += 'I got %s instead.' %fields[0]
2591        raise msg
2592
2593
2594    #Split values
2595    values = []
2596    for value in fields[1].split():
2597        values.append(float(value))
2598
2599    q = ensure_numeric(values)
2600
2601    msg = 'ERROR: File must contain at least one independent value'
2602    assert len(q.shape) == 1, msg
2603
2604
2605
2606    #Read times proper
2607    from Numeric import zeros, Float, alltrue
2608    from config import time_format
2609    import time, calendar
2610
2611    fid = open(filename + '.txt')
2612    lines = fid.readlines()
2613    fid.close()
2614
2615    N = len(lines)
2616    d = len(q)
2617
2618    T = zeros(N, Float)       #Time
2619    Q = zeros((N, d), Float)  #Values
2620
2621    for i, line in enumerate(lines):
2622        fields = line.split(',')
2623        realtime = calendar.timegm(time.strptime(fields[0], time_format))
2624
2625        T[i] = realtime - starttime
2626
2627        for j, value in enumerate(fields[1].split()):
2628            Q[i, j] = float(value)
2629
2630    msg = 'File %s must list time as a monotonically ' %filename
2631    msg += 'increasing sequence'
2632    assert alltrue( T[1:] - T[:-1] > 0 ), msg
2633
2634
2635    #Create NetCDF file
2636    from Scientific.IO.NetCDF import NetCDFFile
2637
2638    fid = NetCDFFile(filename + '.tms', 'w')
2639
2640
2641    fid.institution = 'Geoscience Australia'
2642    fid.description = 'Time series'
2643
2644
2645    #Reference point
2646    #Start time in seconds since the epoch (midnight 1/1/1970)
2647    #FIXME: Use Georef
2648    fid.starttime = starttime
2649
2650
2651    # dimension definitions
2652    #fid.createDimension('number_of_volumes', self.number_of_volumes)
2653    #fid.createDimension('number_of_vertices', 3)
2654
2655
2656    fid.createDimension('number_of_timesteps', len(T))
2657
2658    fid.createVariable('time', Float, ('number_of_timesteps',))
2659
2660    fid.variables['time'][:] = T
2661
2662    for i in range(Q.shape[1]):
2663        try:
2664            name = quantity_names[i]
2665        except:
2666            name = 'Attribute%d'%i
2667
2668        fid.createVariable(name, Float, ('number_of_timesteps',))
2669        fid.variables[name][:] = Q[:,i]
2670
2671    fid.close()
2672
2673
2674def extent_sww(file_name):
2675    """
2676    Read in an sww file.
2677
2678    Input:
2679    file_name - the sww file
2680
2681    Output:
2682    A list of six values:
2683    [min(x), max(x), min(y), max(y), min(stage), max(stage)]
2684    i.e. the spatial extent of the mesh and the range of stage
2685    values over all timesteps and vertices.
2686
2687    """
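    #Example usage (a sketch, assuming 'run.sww' exists):
    #
    #    xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('run.sww')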
2688
2689
2690    from Scientific.IO.NetCDF import NetCDFFile
2691
2692    #Check contents
2693    #Get NetCDF
2694    fid = NetCDFFile(file_name, 'r')
2695
2696    # Get the variables
2697    x = fid.variables['x'][:]
2698    y = fid.variables['y'][:]
2699    stage = fid.variables['stage'][:]
2700    #print "stage",stage
2701    #print "stage.shap",stage.shape
2702    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
2703    #print "min(stage)",min(stage)
2704
2705    fid.close()
2706
2707    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
2708
2709
2710def sww2domain(filename, boundary = None, t = None,
2711               fail_if_NaN = True, NaN_filler = 0,
2712               verbose = False, very_verbose = False):
2713    """
2714    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
2715
2716    Boundary is not recommended if domain.smooth is not selected, as it
2717    uses unique coordinates, but not unique boundaries. This means that
2718    the boundary file will not be compatible with the coordinates, and will
2719    give a different final boundary, or crash.
2720    """
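    #Example usage (a sketch, assuming 'channel.sww' exists):
    #
    #    domain = sww2domain('channel.sww', verbose = True) #Last timestep
    #    domain = sww2domain('channel.sww', t = 1800.0)     #Interpolate to t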
2721    NaN=9.969209968386869e+036
2722    #Default NetCDF fill value, used here to flag missing data.
2723
2724    from Scientific.IO.NetCDF import NetCDFFile
2725    from shallow_water import Domain
2726    from Numeric import asarray, transpose, resize
2727
2728    if verbose: print 'Reading from ', filename
2729    fid = NetCDFFile(filename, 'r')    #Open existing file for read
2730    time = fid.variables['time']       #Timesteps
2731    if t is None:
2732        t = time[-1]
2733    time_interp = get_time_interp(time,t)
2734
2735    # Get the variables as Numeric arrays
2736    x = fid.variables['x'][:]             #x-coordinates of vertices
2737    y = fid.variables['y'][:]             #y-coordinates of vertices
2738    elevation = fid.variables['elevation']     #Elevation
2739    stage = fid.variables['stage']     #Water level
2740    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
2741    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
2742
2743    starttime = fid.starttime[0]
2744    volumes = fid.variables['volumes'][:] #Connectivity
2745    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
2746
2747    conserved_quantities = []
2748    interpolated_quantities = {}
2749    other_quantities = []
2750
2751    # get geo_reference
2752    #sww files don't have to have a geo_ref
2753    try:
2754        geo_reference = Geo_reference(NetCDFObject=fid)
2755    except: #AttributeError, e:
2756        geo_reference = None
2757
2758    if verbose: print '    getting quantities'
2759    for quantity in fid.variables.keys():
2760        dimensions = fid.variables[quantity].dimensions
2761        if 'number_of_timesteps' in dimensions:
2762            conserved_quantities.append(quantity)
2763            interpolated_quantities[quantity]=\
2764                  interpolated_quantity(fid.variables[quantity][:],time_interp)
2765        else: other_quantities.append(quantity)
2766
2767    other_quantities.remove('x')
2768    other_quantities.remove('y')
2769    other_quantities.remove('z')
2770    other_quantities.remove('volumes')
2771
2772    conserved_quantities.remove('time')
2773
2774    if verbose: print '    building domain'
2775    #    From domain.Domain:
2776    #    domain = Domain(coordinates, volumes,\
2777    #                    conserved_quantities = conserved_quantities,\
2778    #                    other_quantities = other_quantities,zone=zone,\
2779    #                    xllcorner=xllcorner, yllcorner=yllcorner)
2780
2781    #   From shallow_water.Domain:
2782    coordinates=coordinates.tolist()
2783    volumes=volumes.tolist()
2784    #FIXME:should this be in mesh?(peter row)
2785    if fid.smoothing == 'Yes': unique = False
2786    else: unique = True
2787    if unique:
2788        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
2789
2790
2791    try:   
2792        domain = Domain(coordinates, volumes, boundary)
2793    except AssertionError, e:
2794        fid.close()
2795        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
2796        raise msg
2797
2798    if not boundary is None:
2799        domain.boundary = boundary
2800
2801    domain.geo_reference = geo_reference
2802
2803    domain.starttime=float(starttime)+float(t)
2804    domain.time=0.0
2805
2806    for quantity in other_quantities:
2807        try:
2808            NaN = fid.variables[quantity].missing_value
2809        except:
2810            pass #quantity has no missing_value number
2811        X = fid.variables[quantity][:]
2812        if very_verbose:
2813            print '       ',quantity
2814            print '        NaN =',NaN
2815            print '        max(X)'
2816            print '       ',max(X)
2817            print '        max(X)==NaN'
2818            print '       ',max(X)==NaN
2819            print ''
2820        if (max(X)==NaN) or (min(X)==NaN):
2821            if fail_if_NaN:
2822                msg = 'quantity "%s" contains no_data entry'%quantity
2823                raise msg
2824            else:
2825                data = (X<>NaN)
2826                X = (X*data)+(data==0)*NaN_filler
2827        if unique:
2828            X = resize(X,(len(X)/3,3))
2829        domain.set_quantity(quantity,X)
2830    #
2831    for quantity in conserved_quantities:
2832        try:
2833            NaN = fid.variables[quantity].missing_value
2834        except:
2835            pass #quantity has no missing_value number
2836        X = interpolated_quantities[quantity]
2837        if very_verbose:
2838            print '       ',quantity
2839            print '        NaN =',NaN
2840            print '        max(X)'
2841            print '       ',max(X)
2842            print '        max(X)==NaN'
2843            print '       ',max(X)==NaN
2844            print ''
2845        if (max(X)==NaN) or (min(X)==NaN):
2846            if fail_if_NaN:
2847                msg = 'quantity "%s" contains no_data entry'%quantity
2848                raise msg
2849            else:
2850                data = (X<>NaN)
2851                X = (X*data)+(data==0)*NaN_filler
2852        if unique:
2853            X = resize(X,(X.shape[0]/3,3))
2854        domain.set_quantity(quantity,X)
2855       
2856    fid.close()
2857    return domain
2858
2859def interpolated_quantity(saved_quantity,time_interp):
2860
2861    #given an index and ratio, interpolate quantity with respect to time.
2862    index,ratio = time_interp
2863    Q = saved_quantity
2864    if ratio > 0:
2865        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
2866    else:
2867        q = Q[index]
2868    #Return vector of interpolated values
2869    return q
2870
2871def get_time_interp(time,t=None):
2872    #Finds the ratio and index for time interpolation.
2873    #It is borrowed from previous pyvolution code.
2874    if t is None:
2875        t=time[-1]
2876        index = -1
2877        ratio = 0.
2878    else:
2879        T = time
2880        tau = t
2881        index=0
2882        msg = 'Time interval derived from file %s [%s:%s]'\
2883            %('FIXMEfilename', T[0], T[-1])
2884        msg += ' does not match model time: %s' %tau
2885        if tau < time[0]: raise msg
2886        if tau > time[-1]: raise msg
2887        while tau > time[index]: index += 1
2888        while tau < time[index]: index -= 1
2889        if tau == time[index]:
2890            #Protect against case where tau == time[-1] (last time)
2891            # - also works in general when tau == time[i]
2892            ratio = 0
2893        else:
2894            #t is now between index and index+1
2895            ratio = (tau - time[index])/(time[index+1] - time[index])
2896    return (index,ratio)
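#Example (a sketch): with time = [0., 10., 20.] and t = 15.,
#get_time_interp returns (index, ratio) == (1, 0.5) and
#interpolated_quantity then blends the two timesteps equally:
#    q = 0.5*Q[1] + 0.5*Q[2]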
2897
2898
2899def weed(coordinates,volumes,boundary = None):
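    """Remove duplicate points from coordinates and remap volumes
    (and boundary, if given) to the weeded point indices.
    Returns (coordinates, volumes, boundary).
    """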
2900    from Numeric import ArrayType
2901    if type(coordinates) == ArrayType:
2902        coordinates = coordinates.tolist()
2903    if type(volumes) == ArrayType:
2904        volumes = volumes.tolist()
2904
2905    unique = False
2906    point_dict = {}
2907    same_point = {}
2908    for i in range(len(coordinates)):
2909        point = tuple(coordinates[i])
2910        if point_dict.has_key(point):
2911            unique = True
2912            same_point[i]=point
2913            #to change all point i references to point j
2914        else:
2915            point_dict[point]=i
2916            same_point[i]=point
2917
2918    coordinates = []
2919    i = 0
2920    for point in point_dict.keys():
2921        point = tuple(point)
2922        coordinates.append(list(point))
2923        point_dict[point]=i
2924        i+=1
2925
2926
2927    for volume in volumes:
2928        for i in range(len(volume)):
2929            index = volume[i]
2930            if index>-1:
2931                volume[i]=point_dict[same_point[index]]
2932
2933    new_boundary = {}
2934    if not boundary is None:
2935        for segment in boundary.keys():
2936            point0 = point_dict[same_point[segment[0]]]
2937            point1 = point_dict[same_point[segment[1]]]
2938            label = boundary[segment]
2939            #FIXME should the boundary attributes be concatenated
2940            #('exterior, pond') or replaced ('pond')? (peter row)
2941
2942            if new_boundary.has_key((point0,point1)):
2943                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
2944                                              #+','+label
2945
2946            elif new_boundary.has_key((point1,point0)):
2947                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
2948                                              #+','+label
2949            else: new_boundary[(point0,point1)]=label
2950
2951        boundary = new_boundary
2952
2953    return coordinates,volumes,boundary
2954
2955
2956def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
2957                 verbose=False):
2958    """Read Digital Elevation Model from the following NetCDF format (.dem)
2959
2960    Example:
2961
2962    ncols         3121
2963    nrows         1800
2964    xllcorner     722000
2965    yllcorner     5893000
2966    cellsize      25
2967    NODATA_value  -9999
2968    138.3698 137.4194 136.5062 135.5558 ..........
2969
2970    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
2971    """
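    #Example usage (a sketch; 'topo.dem' is a hypothetical file with
    #cellsize 25):
    #
    #    from Numeric import ones, Float
    #    stencil = ones((4, 4), Float)/16.  #4x4 block average
    #    decimate_dem('topo', stencil, 100) #Writes topo_100.dem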
2972
2973    import os
2974    from Scientific.IO.NetCDF import NetCDFFile
2975    from Numeric import Float, zeros, sum, reshape, equal
2976
2977    root = basename_in
2978    inname = root + '.dem'
2979
2980    #Open existing netcdf file to read
2981    infile = NetCDFFile(inname, 'r')
2982    if verbose: print 'Reading DEM from %s' %inname
2983
2984    #Read metadata
2985    ncols = infile.ncols[0]
2986    nrows = infile.nrows[0]
2987    xllcorner = infile.xllcorner[0]
2988    yllcorner = infile.yllcorner[0]
2989    cellsize = infile.cellsize[0]
2990    NODATA_value = infile.NODATA_value[0]
2991    zone = infile.zone[0]
2992    false_easting = infile.false_easting[0]
2993    false_northing = infile.false_northing[0]
2994    projection = infile.projection
2995    datum = infile.datum
2996    units = infile.units
2997
2998    dem_elevation = infile.variables['elevation']
2999
3000    #Get output file name
3001    if basename_out == None:
3002        outname = root + '_' + repr(cellsize_new) + '.dem'
3003    else:
3004        outname = basename_out + '.dem'
3005
3006    if verbose: print 'Write decimated NetCDF file to %s' %outname
3007
3008    #Determine some dimensions for decimated grid
3009    (nrows_stencil, ncols_stencil) = stencil.shape
3010    x_offset = ncols_stencil / 2
3011    y_offset = nrows_stencil / 2
3012    cellsize_ratio = int(cellsize_new / cellsize)
3013    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3014    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3015
3016    #Open netcdf file for output
3017    outfile = NetCDFFile(outname, 'w')
3018
3019    #Create new file
3020    outfile.institution = 'Geoscience Australia'
3021    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3022                           'of spatial point data'
3023    #Georeferencing
3024    outfile.zone = zone
3025    outfile.projection = projection
3026    outfile.datum = datum
3027    outfile.units = units
3028
3029    outfile.cellsize = cellsize_new
3030    outfile.NODATA_value = NODATA_value
3031    outfile.false_easting = false_easting
3032    outfile.false_northing = false_northing
3033
3034    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3035    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3036    outfile.ncols = ncols_new
3037    outfile.nrows = nrows_new
3038
3039    # dimension definition
3040    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3041
3042    # variable definition
3043    outfile.createVariable('elevation', Float, ('number_of_points',))
3044
3045    # Get handle to the variable
3046    elevation = outfile.variables['elevation']
3047
3048    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3049
3050    #Store data
3051    global_index = 0
3052    for i in range(nrows_new):
3053        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3054        lower_index = global_index
3055        telev =  zeros(ncols_new, Float)
3056        local_index = 0
3057        trow = i * cellsize_ratio
3058
3059        for j in range(ncols_new):
3060            tcol = j * cellsize_ratio
3061            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3062
3063            #if dem contains 1 or more NODATA_values set value in
3064            #decimated dem to NODATA_value, else compute decimated
3065            #value using stencil
3066            if sum(sum(equal(tmp, NODATA_value))) > 0:
3067                telev[local_index] = NODATA_value
3068            else:
3069                telev[local_index] = sum(sum(tmp * stencil))
3070
3071            global_index += 1
3072            local_index += 1
3073
3074        upper_index = global_index
3075
3076        elevation[lower_index:upper_index] = telev
3077
3078    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3079
3080    infile.close()
3081    outfile.close()
3082
3083
3084
3085
3086def tsh2sww(filename, verbose=False): #test_tsh2sww
3087    """
3088    Convert a tsh/msh mesh file to an sww file, e.g. to check that the mesh 'looks' good.
3089    """
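    #Example usage (a sketch, assuming 'channel.tsh' exists):
    #
    #    tsh2sww('channel.tsh', verbose = True)  #Writes channel.sww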
3090
3091    #FIXME Move to data_manager
3092    from shallow_water import Domain
3093    from pmesh2domain import pmesh_to_domain_instance
3094    import time, os
3095    from data_manager import get_dataobject
3096    from os import sep, path
3097    from utilities.numerical_tools import mean
3098
3099    if verbose: print 'Creating domain from', filename
3100    domain = pmesh_to_domain_instance(filename, Domain)
3101    if verbose: print "Number of triangles = ", len(domain)
3102
3103    domain.smooth = True
3104    domain.format = 'sww'   #Native netcdf visualisation format
3105    file_path, filename = path.split(filename)
3106    filename, ext = path.splitext(filename)
3107    domain.filename = filename
3108    domain.reduction = mean
3109    if verbose: print "file_path", file_path
3110    if file_path == "": file_path = "."
3111    domain.set_datadir(file_path)
3112
3113    if verbose:
3114        print "Output written to " + domain.get_datadir() + sep + \
3115              domain.filename + "." + domain.format
3116    sww = get_dataobject(domain)
3117    sww.store_connectivity()
3118    sww.store_timestep('stage')
3119
3120
3121def asc_csiro2sww(bath_dir,
3122                  elevation_dir,
3123                  ucur_dir,
3124                  vcur_dir,
3125                  sww_file,
3126                  minlat = None, maxlat = None,
3127                  minlon = None, maxlon = None,
3128                  zscale=1,
3129                  mean_stage = 0,
3130                  fail_on_NaN = True,
3131                  elevation_NaN_filler = 0,
3132                  bath_prefix='ba',
3133                  elevation_prefix='el',
3134                  verbose=False):
3135    """
3136    Produce an sww boundary file from esri ascii data from CSIRO.
3137
3138    Also convert latitude and longitude to UTM. All coordinates are
3139    assumed to be given in the GDA94 datum.
3140
3141    assume:
3142    All files are in esri ascii format
3143
3144    4 types of information
3145    bathymetry
3146    elevation
3147    u velocity
3148    v velocity
3149
3150    Assumptions:
3151    The metadata of all the files is the same.
3152    Each type is in a separate directory.
3153    One bath file with extension .000.
3154    The time period is less than 24hrs and uniform.
3155    """
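    #Example usage (a sketch; all directory and file names are hypothetical):
    #
    #    asc_csiro2sww('bathys', 'elevations', 'ucur', 'vcur',
    #                  'boundary.sww',
    #                  minlat = -34.4, maxlat = -34.2,
    #                  minlon = 150.8, maxlon = 151.0,
    #                  verbose = True)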
3156    from Scientific.IO.NetCDF import NetCDFFile
3157
3158    from coordinate_transforms.redfearn import redfearn
3159
3160    precision = Float # So if we want to change the precision its done here
3161
3162    # go in to the bath dir and load the only file,
3163    bath_files = os.listdir(bath_dir)
3164    #print "bath_files",bath_files
3165
3166    #fixme: make more general?
3167    bath_file = bath_files[0]
3168    bath_dir_file =  bath_dir + os.sep + bath_file
3169    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3170    #print "bath_metadata",bath_metadata
3171    #print "bath_grid",bath_grid
3172
3173    #Use the date.time of the bath file as a basis for
3174    #the start time for other files
3175    base_start = bath_file[-12:]
3176
3177    #go into the elevation dir and load the 000 file
3178    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3179                         + base_start
3180    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
3181    #print "elevation_dir_file",elevation_dir_file
3182    #print "elevation_grid", elevation_grid
3183
3184    elevation_files = os.listdir(elevation_dir)
3185    ucur_files = os.listdir(ucur_dir)
3186    vcur_files = os.listdir(vcur_dir)
3187
3188    # the first elevation file should be the
3189    # file with the same base name as the bath data
3190    #print "elevation_files[0]",elevation_files[0]
3191    #print "'el' + base_start",'el' + base_start
3192    assert elevation_files[0] == elevation_prefix + base_start
3193
3194    #assert bath_metadata == elevation_metadata
3195
3196
3197
3198    number_of_latitudes = bath_grid.shape[0]
3199    number_of_longitudes = bath_grid.shape[1]
3200    #number_of_times = len(os.listdir(elevation_dir))
3201    #number_of_points = number_of_latitudes*number_of_longitudes
3202    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3203
3204    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3205                  for x in range(number_of_longitudes)]
3206    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3207                 for y in range(number_of_latitudes)]
3208
3209    # reverse order of lat, so the first lat represents the first grid row
3210    latitudes.reverse()
3211
3212    #print "latitudes - before _get_min_max_indexes",latitudes
3213    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
3214                                                 minlat=minlat, maxlat=maxlat,
3215                                                 minlon=minlon, maxlon=maxlon)
3216
3217
3218    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3219    #print "bath_grid",bath_grid
3220    latitudes = latitudes[kmin:kmax]
3221    longitudes = longitudes[lmin:lmax]
3222    number_of_latitudes = len(latitudes)
3223    number_of_longitudes = len(longitudes)
3224    number_of_times = len(os.listdir(elevation_dir))
3225    number_of_points = number_of_latitudes*number_of_longitudes
3226    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3227    #print "number_of_points",number_of_points
3228
3229    #Work out the times
3230    if len(elevation_files) > 1:
3231        # Assume: The time period is less than 24hrs.
3232        time_period = (int(elevation_files[1][-3:]) - \
3233                      int(elevation_files[0][-3:]))*60*60
3234        times = [x*time_period for x in range(len(elevation_files))]
3235    else:
3236        times = [0.0]
3237    #print "times", times
3238    #print "number_of_latitudes", number_of_latitudes
3239    #print "number_of_longitudes", number_of_longitudes
3240    #print "number_of_times", number_of_times
3241    #print "latitudes", latitudes
3242    #print "longitudes", longitudes
3243
3244
3245    if verbose:
3246        print '------------------------------------------------'
3247        print 'Statistics:'
3248        print '  Extent (lat/lon):'
3249        print '    lat in [%f, %f], len(lat) == %d'\
3250              %(min(latitudes), max(latitudes),
3251                len(latitudes))
3252        print '    lon in [%f, %f], len(lon) == %d'\
3253              %(min(longitudes), max(longitudes),
3254                len(longitudes))
3255        print '    t in [%f, %f], len(t) == %d'\
3256              %(min(times), max(times), len(times))
3257
3258    ######### WRITE THE SWW FILE #############
3259    # NetCDF file definition
3260    outfile = NetCDFFile(sww_file, 'w')
3261
3262    #Create new file
3263    outfile.institution = 'Geoscience Australia'
3264    outfile.description = 'Converted from XXX'
3265
3266
3267    #For sww compatibility
3268    outfile.smoothing = 'Yes'
3269    outfile.order = 1
3270
3271    #Start time in seconds since the epoch (midnight 1/1/1970)
3272    outfile.starttime = starttime = times[0]
3273
3274
3275    # dimension definitions
3276    outfile.createDimension('number_of_volumes', number_of_volumes)
3277
3278    outfile.createDimension('number_of_vertices', 3)
3279    outfile.createDimension('number_of_points', number_of_points)
3280    outfile.createDimension('number_of_timesteps', number_of_times)
3281
3282    # variable definitions
3283    outfile.createVariable('x', precision, ('number_of_points',))
3284    outfile.createVariable('y', precision, ('number_of_points',))
3285    outfile.createVariable('elevation', precision, ('number_of_points',))
3286
3287    #FIXME: Backwards compatibility
3288    outfile.createVariable('z', precision, ('number_of_points',))
3289    #################################
3290
3291    outfile.createVariable('volumes', Int, ('number_of_volumes',
3292                                            'number_of_vertices'))
3293
3294    outfile.createVariable('time', precision,
3295                           ('number_of_timesteps',))
3296
3297    outfile.createVariable('stage', precision,
3298                           ('number_of_timesteps',
3299                            'number_of_points'))
3300
3301    outfile.createVariable('xmomentum', precision,
3302                           ('number_of_timesteps',
3303                            'number_of_points'))
3304
3305    outfile.createVariable('ymomentum', precision,
3306                           ('number_of_timesteps',
3307                            'number_of_points'))
3308
3309    #Store
3310    from coordinate_transforms.redfearn import redfearn
3311    x = zeros(number_of_points, Float)  #Easting
3312    y = zeros(number_of_points, Float)  #Northing
3313
3314    if verbose: print 'Making triangular grid'
3315    #Get zone of 1st point.
3316    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3317
3318    vertices = {}
3319    i = 0
3320    for k, lat in enumerate(latitudes):
3321        for l, lon in enumerate(longitudes):
3322
3323            vertices[l,k] = i
3324
3325            zone, easting, northing = redfearn(lat,lon)
3326
3327            msg = 'Zone boundary crossed at longitude = %s' %lon
3328            #assert zone == refzone, msg
3329            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3330            x[i] = easting
3331            y[i] = northing
3332            i += 1
3333
3334
3335    #Construct 2 triangles per 'rectangular' element
3336    volumes = []
3337    for l in range(number_of_longitudes-1):    #X direction
3338        for k in range(number_of_latitudes-1): #Y direction
3339            v1 = vertices[l,k+1]
3340            v2 = vertices[l,k]
3341            v3 = vertices[l+1,k+1]
3342            v4 = vertices[l+1,k]
3343
3344            #Note, this is different to the ferret2sww code
3345            #since the order of the lats is reversed.
3346            volumes.append([v1,v3,v2]) #Upper element
3347            volumes.append([v4,v2,v3]) #Lower element
3348
3349    volumes = array(volumes)
3350
3351    geo_ref = Geo_reference(refzone,min(x),min(y))
3352    geo_ref.write_NetCDF(outfile)
3353
3354    # This will put the geo ref in the middle
3355    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3356
3357
3358    if verbose:
3359        print '------------------------------------------------'
3360        print 'More Statistics:'
3361        print '  Extent (x/y) [m]:'
3362        print '    x in [%f, %f], len(x) == %d'\
3363              %(min(x), max(x),
3364                len(x))
3365        print '    y in [%f, %f], len(y) == %d'\
3366              %(min(y), max(y),
3367                len(y))
3368        print 'geo_ref: ',geo_ref
3369
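    # x and y are stored relative to the lower left corner recorded in
    # geo_ref above, so the coordinate values written to file stay small.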
    z = resize(bath_grid,outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
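    # (On 64 bit platforms such as the Opteron, Numeric's Int is a
    # 64 bit C long, while NetCDF integers are 32 bit, hence the
    # explicit Int32 cast.)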

    # Debugging shortcut: to produce a valid sww file with a single
    # timestep, uncomment the two lines below and set number_of_times = 1
    # just after outfile.order = 1 earlier in this function.
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
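    # For each timestep: stage w = zscale*elevation + mean_stage is the
    # absolute water level, and depth h = w - z turns the u and v
    # current grids into depth integrated momenta uh and vh.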
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
           _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])

        #print "elevation_grid",elevation_grid
        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
        #print "elevation_grid",elevation_grid

        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue(missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                raise Exception(msg)
            else:
                # replace NODATA cells with the filler value
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler


        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()

def _get_min_max_indexes(latitudes,longitudes,
                        minlat=None, maxlat=None,
                        minlon=None, maxlon=None):
    """
    Return min and max indexes (for slicing) of the lat and long arrays
    needed to cover the area specified by min/max lat/long.

    Think of the latitudes and longitudes as describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assumes latitudes and longitudes are sorted:
    long - from low to high (west to east, eg 148 - 151)
    lat - from high to low (north to south, eg -35 - -38)
    """
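    # Worked example (hypothetical values; indexes follow Numeric's
    # searchsorted semantics):
    #   latitudes  = [-35, -36, -37, -38]   (north to south)
    #   longitudes = [148, 149, 150, 151]   (west to east)
    #   _get_min_max_indexes(latitudes, longitudes,
    #                        minlat=-36.5, maxlat=-35.5,
    #                        minlon=149.5, maxlon=150.5)
    #   returns (0, 3, 1, 4), i.e. latitudes[0:3] and longitudes[1:4]
    #   just cover the requested area.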

    # reverse order of lat, so it's in ascending order
    latitudes.reverse()
    largest_lat_index = len(latitudes)-1
    #Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0

    if maxlat is None:
        lat_max_index = largest_lat_index
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)

    #Take into account that the latitude list was reversed
    latitudes.reverse() # Python passes by reference, need to swap back
    lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
                                   largest_lat_index - lat_min_index
    lat_max_index = lat_max_index + 1 # taking into account how slicing works
    lon_max_index = lon_max_index + 1 # taking into account how slicing works

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index


def _read_asc(filename, verbose=False):
    """Read an ESRI grid file in the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    """
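    # Typical usage (hypothetical file name):
    #   meta, grid = _read_asc('bathy.asc')
    #   'grid' is an nrows x ncols Numeric array of floats; 'meta' holds
    #   the header fields xllcorner, yllcorner, cellsize, NODATA_value.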

    datafile = open(filename)

    if verbose: print 'Reading DEM from %s' %(filename)
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), 'lines'

    ncols = int(lines.pop(0).split()[1].strip())
    nrows = int(lines.pop(0).split()[1].strip())
    xllcorner = float(lines.pop(0).split()[1].strip())
    yllcorner = float(lines.pop(0).split()[1].strip())
    cellsize = float(lines.pop(0).split()[1].strip())
    NODATA_value = float(lines.pop(0).split()[1].strip())

    assert len(lines) == nrows

    #Store data
    grid = []

    for line in lines:
        cells = line.split()
        assert len(cells) == ncols
        grid.append(array([float(x) for x in cells]))
    grid = array(grid)

    return {'xllcorner':xllcorner,
            'yllcorner':yllcorner,
            'cellsize':cellsize,
            'NODATA_value':NODATA_value}, grid