source: inundation/pyvolution/data_manager.py @ 2555

Last change on this file since 2555 was 2553, checked in by sexton, 19 years ago

maybe this is it for dem2pts ...

File size: 107.8 KB
Line 
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST boundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simulation using pyvolution
48
49"""
50import os
51
52from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
53     searchsorted, zeros, allclose, around
54
55
56from coordinate_transforms.geo_reference import Geo_reference
57
58
def make_filename(s):
    """Transform argument string into a suitable filename

    Strips surrounding whitespace, replaces spaces with underscores,
    removes parentheses and collapses double underscores (one pass).
    """

    result = s.strip()
    for old, new in ((' ', '_'), ('(', ''), (')', ''), ('__', '_')):
        result = result.replace(old, new)

    return result
70
71
72def check_dir(path, verbose=None):
73    """Check that specified path exists.
74    If path does not exist it will be created if possible
75
76    USAGE:
77       checkdir(path, verbose):
78
79    ARGUMENTS:
80        path -- Directory
81        verbose -- Flag verbose output (default: None)
82
83    RETURN VALUE:
84        Verified path including trailing separator
85
86    """
87
88    import os, sys
89    import os.path
90
91    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
92        unix = 0
93    else:
94        unix = 1
95
96
97    if path[-1] != os.sep:
98        path = path + os.sep  # Add separator for directories
99
100    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
101    if not (os.access(path,os.R_OK and os.W_OK) or path == ''):
102        try:
103            exitcode=os.mkdir(path)
104
105            # Change access rights if possible
106            #
107            if unix:
108                exitcode=os.system('chmod 775 '+path)
109            else:
110                pass  # FIXME: What about acces rights under Windows?
111
112            if verbose: print 'MESSAGE: Directory', path, 'created.'
113
114        except:
115            print 'WARNING: Directory', path, 'could not be created.'
116            if unix:
117                path = '/tmp/'
118            else:
119                path = 'C:'
120
121            print 'Using directory %s instead' %path
122
123    return(path)
124
125
126
def del_dir(path):
    """Recursively delete directory path and all its contents

    Files that cannot be removed are reported and skipped; the final
    os.rmdir will then fail if the directory is not empty.
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            # Recurse into real subdirectories only; symlinks are
            # removed as files so their targets are left untouched
            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    # Best effort: report and continue with the rest
                    print "Could not remove file %s" %X

        os.rmdir(path)
147
148
149
def create_filename(datadir, filename, format, size=None, time=None):
    """Build an output path of the form
    <datadir>/<filename>[_size<size>][_time<time>].<format>

    datadir is verified (and created if needed) via check_dir.
    """

    import os
    #from config import data_dir

    name = check_dir(datadir) + filename

    if size is not None:
        name += '_size%d' %size

    if time is not None:
        name += '_time%.2f' %time

    return name + '.' + format
165
166
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format

    Returns the paths matching the naming scheme used by
    create_filename for this size (any time suffix matches).
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)

    #NOTE: check_dir already returns a trailing separator and
    #create_filename writes '_size%d' (no '='); the old pattern
    #dir + os.sep + filename + '_size=%d*' could never match.
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)
180
181
182
183#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats

    Base class holding the state shared by all concrete writers:
    the output filename, a timestep counter, the number of volumes
    and the domain being stored.
    """


    def __init__(self, domain, extension, mode = 'w'):
        msg = '''Mode %s must be either:''' %mode +\
              '''   'w' (write)''' +\
              '''   'r' (read)''' +\
              '''   'a' (append)'''
        assert mode in ['r', 'w', 'a'], msg

        #Create filename
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))

        self.timestep = 0
        self.domain = domain
        self.number_of_volumes = len(domain)
        self.filename = create_filename(domain.get_datadir(),
                        domain.get_name(), extension)
        #print 'F', self.filename


        #FIXME: Should we have a general set_precision function?
210
211
212
213#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        """Create the underlying NetCDF file and, in 'w' mode, define
        its dimensions and variables.

        domain    -- domain object whose output is to be stored
        mode      -- 'w' (write), 'r' (read) or 'a' (append)
        max_size  -- file size (bytes) at which output rolls over into
                     a new file (default ~2GB)
        recursion -- internal: rollover level set by store_timestep;
                     leave at False for normal use
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        # A max_size supplied on the domain overrides the argument
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            # With smoothing, shared vertices are stored once;
            # otherwise each triangle stores its own three vertices
            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        # Elevation is duplicated into the deprecated 'z' variable
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file

        names -- quantity name or list of names ('stage', 'xmomentum',
                 'ymomentum')

        If appending would push the file past max_size, output rolls
        over into a fresh sww file named after the current time and a
        new Data_format_sww becomes the domain's writer.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        #minimum_allowed_depth = 0.001
        minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        # Retry for a while in case a reader has the file open/locked
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise msg  #FIXME: Python 2 string exception



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        # Per-timestep growth estimated from the current average
        file_size_increase =  file_size/i
        # recursion counts rollover levels; the allowance doubles each
        # level (False == 0 for the first file)
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domian.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break swollens reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            # Accept a single name as well as a sequence of names
            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    # Where depth is below the threshold, store the
                    # bed elevation instead of the stage
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()
483
484
485
486#Class for handling checkpoints data
487class Data_format_cpt(Data_format):
488    """Interface to native NetCDF format (.cpt)
489    """
490
491
492    def __init__(self, domain, mode = 'w'):
493        from Scientific.IO.NetCDF import NetCDFFile
494        from Numeric import Int, Float, Float
495
496        self.precision = Float #Use full precision
497
498        Data_format.__init__(self, domain, 'sww', mode)
499
500
501        # NetCDF file definition
502        fid = NetCDFFile(self.filename, mode)
503
504        if mode == 'w':
505            #Create new file
506            fid.institution = 'Geoscience Australia'
507            fid.description = 'Checkpoint data'
508            #fid.smooth = domain.smooth
509            fid.order = domain.default_order
510
511            # dimension definitions
512            fid.createDimension('number_of_volumes', self.number_of_volumes)
513            fid.createDimension('number_of_vertices', 3)
514
515            #Store info at all vertices (no smoothing)
516            fid.createDimension('number_of_points', 3*self.number_of_volumes)
517            fid.createDimension('number_of_timesteps', None) #extensible
518
519            # variable definitions
520
521            #Mesh
522            fid.createVariable('x', self.precision, ('number_of_points',))
523            fid.createVariable('y', self.precision, ('number_of_points',))
524
525
526            fid.createVariable('volumes', Int, ('number_of_volumes',
527                                                'number_of_vertices'))
528
529            fid.createVariable('time', self.precision,
530                               ('number_of_timesteps',))
531
532            #Allocate space for all quantities
533            for name in domain.quantities.keys():
534                fid.createVariable(name, self.precision,
535                                   ('number_of_timesteps',
536                                    'number_of_points'))
537
538        #Close
539        fid.close()
540
541
542    def store_checkpoint(self):
543        """
544        Write x,y coordinates of triangles.
545        Write connectivity (
546        constituting
547        the bed elevation.
548        """
549
550        from Scientific.IO.NetCDF import NetCDFFile
551
552        from Numeric import concatenate
553
554        domain = self.domain
555
556        #Get NetCDF
557        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
558
559        # Get the variables
560        x = fid.variables['x']
561        y = fid.variables['y']
562
563        volumes = fid.variables['volumes']
564
565        # Get X, Y and bed elevation Z
566        Q = domain.quantities['elevation']
567        X,Y,Z,V = Q.get_vertex_values(xy=True,
568                                      precision = self.precision)
569
570
571
572        x[:] = X.astype(self.precision)
573        y[:] = Y.astype(self.precision)
574        z[:] = Z.astype(self.precision)
575
576        volumes[:] = V
577
578        #Close
579        fid.close()
580
581
582    def store_timestep(self, name):
583        """Store time and named quantity to file
584        """
585        from Scientific.IO.NetCDF import NetCDFFile
586        from time import sleep
587
588        #Get NetCDF
589        retries = 0
590        file_open = False
591        while not file_open and retries < 10:
592            try:
593                fid = NetCDFFile(self.filename, 'a') #Open existing file
594            except IOError:
595                #This could happen if someone was reading the file.
596                #In that case, wait a while and try again
597                msg = 'Warning (store_timestep): File %s could not be opened'\
598                          %self.filename
599                msg += ' - trying again'
600                print msg
601                retries += 1
602                sleep(1)
603            else:
604                file_open = True
605
606            if not file_open:
607                msg = 'File %s could not be opened for append' %self.filename
608            raise msg
609
610
611        domain = self.domain
612
613        # Get the variables
614        time = fid.variables['time']
615        stage = fid.variables['stage']
616        i = len(time)
617
618        #Store stage
619        time[i] = self.domain.time
620
621        # Get quantity
622        Q = domain.quantities[name]
623        A,V = Q.get_vertex_values(xy=False,
624                                  precision = self.precision)
625
626        stage[i,:] = A.astype(self.precision)
627
628        #Flush and close
629        fid.sync()
630        fid.close()
631
632
633
634
635
636#Function for storing xya output
637#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Generic interface to data formats

    Writer for the ASCII .xya point format: one vertex per line with
    bed elevation, stage and friction as attributes.
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        # With smoothing, shared vertices are stored once
        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        # Header line: point count, attribute count and column legend
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        # map(None, ...) zips the columns (Python 2 idiom)
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file

        Not implemented for the xya format.
        """
        pass
707
708
709#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number elements.
       triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """

    import os.path

    # Append .obj unless the caller already supplied it
    root, extension = os.path.splitext(filename)
    FN = filename
    if extension != '.obj':
        FN = filename + '.obj'

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    # Assemble all lines first, then write in one go
    lines = ["# Triangulation as an obj file\n"]

    # One 'v' line per vertex of every element
    for row in range(M):
        for col in range(N):
            lines.append("v %f %f %f\n" % (x[row,col],y[row,col],z[row,col]))

    # One 'f' line per element; obj vertex indices are 1-based
    for row in range(M):
        first = row*N
        lines.append("f %d %d %d\n" % (first+1,first+2,first+3))

    outfile = open(FN, 'wb')
    outfile.writelines(lines)
    outfile.close()
747
748
749#########################################################
750#Conversion routines
751########################################################
752
def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj

    basefilename -- stem used by create_filename to locate the .sww file
    size         -- number of volumes (also part of the filename)

    Writes one obj file for the bathymetry and one per timestep for
    the stage values.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    # Gather the three vertices of each triangle; vertex j of
    # triangle i is read from flat index i+j*M of the sww arrays
    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        # Replace the z column with this timestep's stage values
        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        # NOTE: only the first 5 chars of basefilename are used here -
        # presumably to keep names short for the viewer; verify intent
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)
805
806
807def dat2obj(basefilename):
808    """Convert line based data output to obj
809    FIXME: Obsolete?
810    """
811
812    import glob, os
813    from config import data_dir
814
815
816    #Get bathymetry and x,y's
817    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()
818
819    from Numeric import zeros, Float
820
821    M = len(lines)  #Number of lines
822    x = zeros((M,3), Float)
823    y = zeros((M,3), Float)
824    z = zeros((M,3), Float)
825
826    ##i = 0
827    for i, line in enumerate(lines):
828        tokens = line.split()
829        values = map(float,tokens)
830
831        for j in range(3):
832            x[i,j] = values[j*3]
833            y[i,j] = values[j*3+1]
834            z[i,j] = values[j*3+2]
835
836        ##i += 1
837
838
839    #Write obj for bathymetry
840    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)
841
842
843    #Now read all the data files with variable information, combine with
844    #x,y info
845    #and store as obj
846
847    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')
848
849    for filename in files:
850        print 'Processing %s' % filename
851
852        lines = open(data_dir+os.sep+filename,'r').readlines()
853        assert len(lines) == M
854        root, ext = os.path.splitext(filename)
855
856        #Get time from filename
857        i0 = filename.find('_time=')
858        if i0 == -1:
859            #Skip bathymetry file
860            continue
861
862        i0 += 6  #Position where time starts
863        i1 = filename.find('.dat')
864
865        if i1 > i0:
866            t = float(filename[i0:i1])
867        else:
868            raise 'Hmmmm'
869
870
871
872        ##i = 0
873        for i, line in enumerate(lines):
874            tokens = line.split()
875            values = map(float,tokens)
876
877            for j in range(3):
878                z[i,j] = values[j]
879
880            ##i += 1
881
882        #Write obj for variable data
883        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
884
885
def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2

    All dimensions and variables are mirrored; static (non time
    dependent) variables are copied verbatim while 'time' and 'stage'
    are subsampled by the given slice.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    # Mirror every variable definition (same type and dimensions)
    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass  # time-dependent variables handled below
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
933
934
935#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename

    domain.format selects the writer, e.g. 'sww' -> Data_format_sww.
    """

    #Look the class up in module scope instead of using eval -
    #equivalent for valid formats but cannot execute arbitrary code.
    cls = globals()['Data_format_%s' %domain.format]
    return cls(domain, mode)
942
943def xya2pts(basename_in, basename_out=None, verbose=False,
944            #easting_min=None, easting_max=None,
945            #northing_min=None, northing_max=None,
946            stride = 1,
947            attribute_name = 'elevation',
948            z_func = None):
949    """Read points data from ascii (.xya)
950
951    Example:
952
953              x(m)        y(m)        z(m)
954         0.00000e+00  0.00000e+00  1.3535000e-01
955         0.00000e+00  1.40000e-02  1.3535000e-01
956
957
958
959    Convert to NetCDF pts format which is
960
961    points:  (Nx2) Float array
962    elevation: N Float array
963
964    Only lines that contain three numeric values are processed
965
966    If z_func is specified, it will be applied to the third field
967    """
968
969    import os
970    #from Scientific.IO.NetCDF import NetCDFFile
971    from Numeric import Float, arrayrange, concatenate
972
973    root, ext = os.path.splitext(basename_in)
974
975    if ext == '': ext = '.xya'
976
977    #Get NetCDF
978    infile = open(root + ext, 'r')  #Open existing xya file for read
979
980    if verbose: print 'Reading xya points from %s' %(root + ext)
981
982    points = []
983    attribute = []
984    for i, line in enumerate(infile.readlines()):
985
986        if i % stride != 0: continue
987
988        fields = line.split()
989
990        try:
991            assert len(fields) == 3
992        except:
993            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)
994
995        try:
996            x = float( fields[0] )
997            y = float( fields[1] )
998            z = float( fields[2] )
999        except:
1000            continue
1001
1002        points.append( [x, y] )
1003
1004        if callable(z_func):
1005            attribute.append(z_func(z))
1006        else:
1007            attribute.append(z)
1008
1009
1010    #Get output file
1011    if basename_out == None:
1012        ptsname = root + '.pts'
1013    else:
1014        ptsname = basename_out + '.pts'
1015
1016    if verbose: print 'Store to NetCDF file %s' %ptsname
1017    write_ptsfile(ptsname, points, attribute, attribute_name)
1018
1019
1020
1021######Obsoleted by export_points in load_mesh
1022def write_ptsfile(ptsname, points, attribute, attribute_name = None,
1023                  zone=None, xllcorner=None, yllcorner=None):
1024    """Write points and associated attribute to pts (NetCDF) format
1025    """
1026
1027    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'
1028
1029    from Numeric import Float
1030
1031    if attribute_name is None:
1032        attribute_name = 'attribute'
1033
1034
1035    from Scientific.IO.NetCDF import NetCDFFile
1036
1037    # NetCDF file definition
1038    outfile = NetCDFFile(ptsname, 'w')
1039
1040
1041    #Create new file
1042    outfile.institution = 'Geoscience Australia'
1043    outfile.description = 'NetCDF pts format for compact and '\
1044                          'portable storage of spatial point data'
1045
1046
1047    #Georeferencing
1048    from coordinate_transforms.geo_reference import Geo_reference
1049    if zone is None:
1050        assert xllcorner is None, 'xllcorner must be None'
1051        assert yllcorner is None, 'yllcorner must be None'       
1052        Geo_reference().write_NetCDF(outfile)
1053    else:
1054        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)       
1055       
1056
1057
1058    outfile.createDimension('number_of_points', len(points))
1059    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1060
1061    # variable definitions
1062    outfile.createVariable('points', Float, ('number_of_points',
1063                                             'number_of_dimensions'))
1064    outfile.createVariable(attribute_name, Float, ('number_of_points',))
1065
1066    # Get handles to the variables
1067    nc_points = outfile.variables['points']
1068    nc_attribute = outfile.variables[attribute_name]
1069
1070    #Store data
1071    nc_points[:, :] = points
1072    nc_attribute[:] = attribute
1073
1074    outfile.close()
1075
1076
def dem2pts(basename_in, basename_out=None, 
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False,):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    basename_in/basename_out -- file names without extension; output
                                defaults to basename_in + '.pts'
    easting_min/max, northing_min/max -- optional clipping rectangle
    use_cache -- if True, memoise the conversion keyed on the .dem file
    verbose   -- print progress information
    """

    kwargs = {'basename_out': basename_out, 
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        #Direct keyword call instead of the deprecated apply() builtin
        result = _dem2pts(basename_in, **kwargs)

    return result
1118
1119
1120def _dem2pts(basename_in, basename_out=None, verbose=False,
1121            easting_min=None, easting_max=None,
1122            northing_min=None, northing_max=None):
1123    """Read Digitial Elevation model from the following NetCDF format (.dem)
1124
1125    Internal function. See public function dem2pts for details.   
1126    """
1127
1128    #FIXME: Can this be written feasibly using write_pts?
1129
1130    import os
1131    from Scientific.IO.NetCDF import NetCDFFile
1132    from Numeric import Float, zeros, reshape, sum
1133
1134    root = basename_in
1135
1136    #Get NetCDF
1137    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read
1138
1139    if verbose: print 'Reading DEM from %s' %(root + '.dem')
1140
1141    ncols = infile.ncols[0]
1142    nrows = infile.nrows[0]
1143    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
1144    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
1145    cellsize = infile.cellsize[0]
1146    NODATA_value = infile.NODATA_value[0]
1147    dem_elevation = infile.variables['elevation']
1148
1149    zone = infile.zone[0]
1150    false_easting = infile.false_easting[0]
1151    false_northing = infile.false_northing[0]
1152
1153    #Text strings
1154    projection = infile.projection
1155    datum = infile.datum
1156    units = infile.units
1157
1158
1159    #Get output file
1160    if basename_out == None:
1161        ptsname = root + '.pts'
1162    else:
1163        ptsname = basename_out + '.pts'
1164
1165    if verbose: print 'Store to NetCDF file %s' %ptsname
1166    # NetCDF file definition
1167    outfile = NetCDFFile(ptsname, 'w')
1168
1169    #Create new file
1170    outfile.institution = 'Geoscience Australia'
1171    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
1172                      'of spatial point data'
1173    #assign default values
1174    if easting_min is None: easting_min = xllcorner
1175    if easting_max is None: easting_max = xllcorner + ncols*cellsize
1176    if northing_min is None: northing_min = yllcorner
1177    if northing_max is None: northing_max = yllcorner + nrows*cellsize
1178
1179    #compute offsets to update georeferencing
1180    easting_offset = xllcorner - easting_min
1181    northing_offset = yllcorner - northing_min
1182
1183    #Georeferencing
1184    outfile.zone = zone
1185    outfile.xllcorner = easting_min #Easting of lower left corner
1186    outfile.yllcorner = northing_min #Northing of lower left corner
1187    outfile.false_easting = false_easting
1188    outfile.false_northing = false_northing
1189
1190    outfile.projection = projection
1191    outfile.datum = datum
1192    outfile.units = units
1193
1194
1195    #Grid info (FIXME: probably not going to be used, but heck)
1196    outfile.ncols = ncols
1197    outfile.nrows = nrows
1198
1199    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
1200    totalnopoints = nrows*ncols
1201
1202    # calculating number of NODATA_values for each row in clipped region
1203    #FIXME: use array operations to do faster
1204    nn = 0
1205    k = 0
1206    i1_0 = 0
1207    j1_0 = 0
1208    thisj = 0
1209    thisi = 0
1210    for i in range(nrows):
1211        y = (nrows-i-1)*cellsize + yllcorner
1212        for j in range(ncols):
1213            x = j*cellsize + xllcorner
1214            if easting_min <= x <= easting_max and \
1215               northing_min <= y <= northing_max:
1216                thisj = j
1217                thisi = i
1218                if dem_elevation_r[i,j] == NODATA_value: nn += 1
1219
1220                if k == 0:
1221                    i1_0 = i
1222                    j1_0 = j
1223                k += 1   
1224
1225    index1 = j1_0
1226    index2 = thisj
1227   
1228    # dimension definitions
1229    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
1230    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
1231   
1232    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0) 
1233    nopoints = clippednopoints-nn
1234
1235    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]
1236   
1237    if verbose and nn > 0:
1238        print 'There are %d values in the elevation' %totalnopoints
1239        print 'There are %d values in the clipped elevation' %clippednopoints
1240        print 'There are %d NODATA_values in the clipped elevation' %nn
1241       
1242    outfile.createDimension('number_of_points', nopoints)
1243    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1244
1245    # variable definitions
1246    outfile.createVariable('points', Float, ('number_of_points',
1247                                             'number_of_dimensions'))
1248    outfile.createVariable('elevation', Float, ('number_of_points',))
1249
1250    # Get handles to the variables
1251    points = outfile.variables['points']
1252    elevation = outfile.variables['elevation']
1253
1254    lenv = index2-index1+1
1255    #Store data
1256    global_index = 0
1257    #for i in range(nrows):
1258    for i in range(i1_0,thisi+1,1):
1259        if verbose and i%((nrows+10)/10)==0:
1260            print 'Processing row %d of %d' %(i, nrows)
1261
1262        lower_index = global_index
1263
1264        v = dem_elevation_r[i,index1:index2+1]
1265        no_NODATA = sum(v == NODATA_value)
1266        if no_NODATA > 0:
1267            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
1268        else:
1269            newcols = lenv #ncols_in_bounding_box
1270
1271        telev = zeros(newcols, Float)
1272        tpoints = zeros((newcols, 2), Float)
1273
1274        local_index = 0
1275       
1276        y = (nrows-i-1)*cellsize + yllcorner
1277        #for j in range(ncols):
1278        for j in range(j1_0,index2+1,1):
1279
1280            x = j*cellsize + xllcorner
1281            if easting_min <= x <= easting_max and \
1282               northing_min <= y <= northing_max and \
1283               dem_elevation_r[i,j] <> NODATA_value:
1284                tpoints[local_index, :] = [x-easting_min,y-northing_min]
1285                telev[local_index] = dem_elevation_r[i, j]
1286                global_index += 1
1287                local_index += 1
1288               
1289        upper_index = global_index
1290
1291        if upper_index == lower_index + newcols: 
1292            points[lower_index:upper_index, :] = tpoints
1293            elevation[lower_index:upper_index] = telev
1294
1295    assert global_index == nopoints, 'index not equal to number of points'
1296
1297    infile.close()
1298    outfile.close()
1299
1300
1301
1302def _read_hecras_cross_sections(lines):
1303    """Return block of surface lines for each cross section
1304    Starts with SURFACE LINE,
1305    Ends with END CROSS-SECTION
1306    """
1307
1308    points = []
1309
1310    reading_surface = False
1311    for i, line in enumerate(lines):
1312
1313        if len(line.strip()) == 0:    #Ignore blanks
1314            continue
1315
1316        if lines[i].strip().startswith('SURFACE LINE'):
1317            reading_surface = True
1318            continue
1319
1320        if lines[i].strip().startswith('END') and reading_surface:
1321            yield points
1322            reading_surface = False
1323            points = []
1324
1325        if reading_surface:
1326            fields = line.strip().split(',')
1327            easting = float(fields[0])
1328            northing = float(fields[1])
1329            elevation = float(fields[2])
1330            points.append([easting, northing, elevation])
1331
1332
1333
1334
def hecras_cross_sections2pts(basename_in,
                              basename_out=None,
                              verbose=False):
    """Read HEC-RAS Elevation data from the following ASCII format (.sdf)

    basename_in  -- name of input file without the '.sdf' extension
    basename_out -- name of output file without extension; defaults to
                    basename_in + '.pts'
    verbose      -- print progress information

    Example:


# RAS export file created on Mon 15Aug2005 11:42
# by HEC-RAS Version 3.1.1

BEGIN HEADER:
  UNITS: METRIC
  DTM TYPE: TIN
  DTM: v:\1\cit\perth_topo\river_tin
  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
  MAP PROJECTION: UTM
  PROJECTION ZONE: 50
  DATUM: AGD66
  VERTICAL DATUM:
  NUMBER OF REACHES:  19
  NUMBER OF CROSS-SECTIONS:  14206
END HEADER:


Only the SURFACE LINE data of the following form will be utilised

  CROSS-SECTION:
    STREAM ID:Southern-Wungong
    REACH ID:Southern-Wungong
    STATION:19040.*
    CUT LINE:
      405548.671603161 , 6438142.7594925
      405734.536092045 , 6438326.10404912
      405745.130459356 , 6438331.48627354
      405813.89633823 , 6438368.6272789
    SURFACE LINE:
     405548.67,   6438142.76,   35.37
     405552.24,   6438146.28,   35.41
     405554.78,   6438148.78,   35.44
     405555.80,   6438149.79,   35.44
     405559.37,   6438153.31,   35.45
     405560.88,   6438154.81,   35.44
     405562.93,   6438156.83,   35.42
     405566.50,   6438160.35,   35.38
     405566.99,   6438160.83,   35.37
     ...
   END CROSS-SECTION

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get ASCII file
    infile = open(root + '.sdf', 'r')  #Open SDF file for read

    if verbose: print 'Reading DEM from %s' %(root + '.sdf')

    lines = infile.readlines()
    infile.close()

    if verbose: print 'Converting to pts format'

    #Skip leading blank lines and '#' comment lines before the header
    i = 0
    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
        i += 1

    #Parse the fixed-order header block; each assert documents the
    #expected line so a malformed file fails loudly rather than silently
    assert lines[i].strip().upper() == 'BEGIN HEADER:'
    i += 1

    assert lines[i].strip().upper().startswith('UNITS:')
    units = lines[i].strip().split()[1]
    i += 1

    assert lines[i].strip().upper().startswith('DTM TYPE:')
    i += 1

    assert lines[i].strip().upper().startswith('DTM:')
    i += 1

    assert lines[i].strip().upper().startswith('STREAM')
    i += 1

    assert lines[i].strip().upper().startswith('CROSS')
    i += 1

    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
    projection = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
    zone = int(lines[i].strip().split(':')[1])
    i += 1

    assert lines[i].strip().upper().startswith('DATUM:')
    datum = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
    number_of_cross_sections = int(lines[i].strip().split(':')[1])
    i += 1


    #Now read all points
    #NOTE(review): if the file contains zero cross sections, j is never
    #bound and the assert below raises NameError — confirm whether empty
    #files need to be supported
    points = []
    elevation = []
    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
        for k, entry in enumerate(entries):
            points.append(entry[:2])
            elevation.append(entry[2])


    msg = 'Actual #number_of_cross_sections == %d, Reported as %d'\
          %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg

    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    #FIXME (DSG-ON): use loadASCII export_points_file
    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable ' +\
                          'storage of spatial point data derived from HEC-RAS'

    #Georeferencing: points are stored as absolute UTM coordinates,
    #hence the zero corner offsets
    outfile.zone = zone
    outfile.xllcorner = 0.0
    outfile.yllcorner = 0.0
    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
    outfile.false_northing = 1000000

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    outfile.variables['points'][:] = points
    outfile.variables['elevation'][:] = elevation

    outfile.close()
1509
1510
1511
def sww2dem(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            NODATA_value = -9999,
            easting_min = None,
            easting_max = None,
            northing_min = None,
            northing_max = None,
            expand_search = False, #To avoid intractable situations (This will be fixed when least_squares gets redesigned)
            verbose = False,
            origin = None,
            datum = 'WGS84',
            format = 'ers'):

    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)

    Example (ASC):

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'elevation'.

    if timestep (an index) is given, output quantity at that timestep

    if reduction is given use that to reduce quantity over all timesteps.

    datum

    format can be either 'asc' or 'ers'

    NOTE(review): the timestep argument is accepted but never referenced
    in this function's body — time-dependent quantities are always
    reduced over all timesteps with `reduction` (default max). Confirm
    whether per-timestep output is still intended.
    """

    import sys
    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
    from Numeric import array2string

    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
    from util import apply_expression_to_dictionary

    msg = 'Format must be either asc or ers'
    assert format.lower() in ['asc', 'ers'], msg


    false_easting = 500000
    false_northing = 10000000

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    demfile = basename_out + '.' + format
    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option

    #Read sww file
    if verbose: print 'Reading from %s' %swwfile
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        #Caller-supplied origin overrides the file's georeference
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]



    #FIXME: Refactor using code from file_function.statistics
    #Something like print swwstats(swwname)
    if verbose:
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        times = fid.variables['time'][:]
        print '------------------------------------------------'
        print 'Statistics of SWW file:'
        print '  Name: %s' %swwfile
        print '  Reference:'
        print '    Lower left corner: [%f, %f]'\
              %(xllcorner, yllcorner)
        print '    Start time: %f' %fid.starttime[0]
        print '  Extent:'
        print '    x [m] in [%f, %f], len(x) == %d'\
              %(min(x.flat), max(x.flat), len(x.flat))
        print '    y [m] in [%f, %f], len(y) == %d'\
              %(min(y.flat), max(y.flat), len(y.flat))
        print '    t [s] in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))
        print '  Quantities [SI units]:'
        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
            q = fid.variables[name][:].flat
            print '    %s in [%f, %f]' %(name, min(q), max(q))





    #Get quantity and reduce if applicable
    if verbose: print 'Processing quantity %s' %quantity

    #Turn NetCDF objects into Numeric arrays so that quantity may be an
    #arbitrary arithmetic expression over the stored variables
    quantity_dict = {}
    for name in fid.variables.keys():
        quantity_dict[name] = fid.variables[name][:]


    q = apply_expression_to_dictionary(quantity, quantity_dict)



    if len(q.shape) == 2:
        #q has a time component and needs to be reduced along
        #the temporal dimension
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    #Post condition: Now q has dimension: number_of_points
    assert len(q.shape) == 1
    assert q.shape[0] == number_of_points


    if verbose:
        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))


    #Create grid and update xll/yll corner and x,y

    #Relative extent: requested bounds are converted to mesh-relative
    #coordinates; unspecified bounds default to the mesh extent
    if easting_min is None:
        xmin = min(x)
    else:
        xmin = easting_min - xllcorner

    if easting_max is None:
        xmax = max(x)
    else:
        xmax = easting_max - xllcorner

    if northing_min is None:
        ymin = min(y)
    else:
        ymin = northing_min - yllcorner

    if northing_max is None:
        ymax = max(y)
    else:
        ymax = northing_max - yllcorner



    if verbose: print 'Creating grid'
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1


    #New absolute reference and coordinates
    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2



    grid_points = zeros ( (ncols*nrows, 2), Float )


    for i in xrange(nrows):
        if format.lower() == 'asc':
            yg = i*cellsize
        else:
            #this will flip the order of the y values for ers
            yg = (nrows-i)*cellsize

        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

            grid_points[k,0] = xg
            grid_points[k,1] = yg

    #Interpolate
    from least_squares import Interpolation


    #FIXME: This should be done with precrop = True (?), otherwise it'll
    #take forever. With expand_search  set to False, some grid points might
    #miss out.... This will be addressed though Duncan's refactoring of least_squares

    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
                           precrop = False,
                           expand_search = expand_search,
                           verbose = verbose)



    #Interpolate using quantity values
    if verbose: print 'Interpolating'
    grid_values = interp.interpolate(q).flat


    if verbose:
        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
                                                      max(grid_values))

    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
    P = interp.mesh.get_boundary_polygon()
    outside_indices = outside_polygon(grid_points, P, closed=True)

    for i in outside_indices:
        grid_values[i] = NODATA_value




    if format.lower() == 'ers':
        # setup ERS header information
        grid_values = reshape(grid_values,(nrows, ncols))
        header = {}
        header['datum'] = '"' + datum + '"'
        # FIXME The use of hardwired UTM and zone number needs to be made optional
        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
        header['projection'] = '"UTM-' + str(zone) + '"'
        header['coordinatetype'] = 'EN'
        if header['coordinatetype'] == 'LL':
            header['longitude'] = str(newxllcorner)
            header['latitude'] = str(newyllcorner)
        elif header['coordinatetype'] == 'EN':
            header['eastings'] = str(newxllcorner)
            header['northings'] = str(newyllcorner)
        header['nullcellvalue'] = str(NODATA_value)
        header['xdimension'] = str(cellsize)
        header['ydimension'] = str(cellsize)
        header['value'] = '"' + quantity + '"'
        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test


        #Write
        if verbose: print 'Writing %s' %demfile
        import ermapper_grids
        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)

        fid.close()
    else:
        #Write to Ascii format

        #Write prj file
        prjfile = basename_out + '.prj'

        if verbose: print 'Writing %s' %prjfile
        prjid = open(prjfile, 'w')
        prjid.write('Projection    %s\n' %'UTM')
        prjid.write('Zone          %d\n' %zone)
        prjid.write('Datum         %s\n' %datum)
        prjid.write('Zunits        NO\n')
        prjid.write('Units         METERS\n')
        prjid.write('Spheroid      %s\n' %datum)
        prjid.write('Xshift        %d\n' %false_easting)
        prjid.write('Yshift        %d\n' %false_northing)
        prjid.write('Parameters\n')
        prjid.close()



        if verbose: print 'Writing %s' %demfile

        ascid = open(demfile, 'w')

        ascid.write('ncols         %d\n' %ncols)
        ascid.write('nrows         %d\n' %nrows)
        ascid.write('xllcorner     %d\n' %newxllcorner)
        ascid.write('yllcorner     %d\n' %newyllcorner)
        ascid.write('cellsize      %f\n' %cellsize)
        ascid.write('NODATA_value  %d\n' %NODATA_value)


        #Get bounding polygon from mesh
        #P = interp.mesh.get_boundary_polygon()
        #inside_indices = inside_polygon(grid_points, P)

        #ASC rows run north to south, hence the reversed base_index
        for i in range(nrows):
            if verbose and i%((nrows+10)/10)==0:
                print 'Doing row %d of %d' %(i, nrows)

            base_index = (nrows-i-1)*ncols

            slice = grid_values[base_index:base_index+ncols]
            s = array2string(slice, max_line_width=sys.maxint)
            ascid.write(s[1:-1] + '\n')


            #print
            #for j in range(ncols):
            #    index = base_index+j#
            #    print grid_values[index],
            #    ascid.write('%f ' %grid_values[index])
            #ascid.write('\n')

        #Close
        ascid.close()
        fid.close()
1869
1870#Backwards compatibility
1871def sww2asc(basename_in, basename_out = None,
1872            quantity = None,
1873            timestep = None,
1874            reduction = None,
1875            cellsize = 10,
1876            verbose = False,
1877            origin = None):
1878    print 'sww2asc will soon be obsoleted - please use sww2dem'
1879    sww2dem(basename_in,
1880            basename_out = basename_out,
1881            quantity = quantity,
1882            timestep = timestep,
1883            reduction = reduction,
1884            cellsize = cellsize,
1885            verbose = verbose,
1886            origin = origin,
1887            datum = 'WGS84',
1888            format = 'asc')
1889
1890def sww2ers(basename_in, basename_out = None,
1891            quantity = None,
1892            timestep = None,
1893            reduction = None,
1894            cellsize = 10,
1895            verbose = False,
1896            origin = None,
1897            datum = 'WGS84'):
1898    print 'sww2ers will soon be obsoleted - please use sww2dem'
1899    sww2dem(basename_in,
1900            basename_out = basename_out,
1901            quantity = quantity,
1902            timestep = timestep,
1903            reduction = reduction,
1904            cellsize = cellsize,
1905            verbose = verbose,
1906            origin = origin,
1907            datum = datum,
1908            format = 'ers')
1909################################# END COMPATIBILITY ##############
1910
1911
1912
def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
                                  use_cache = False,
                                  verbose = False):
    """Read Digitial Elevation model from the following ASCII format (.asc)   

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert basename_in + '.asc' to NetCDF format (.dem)
    mimicking the ASCII format closely.


    An accompanying file with same basename_in but extension .prj must exist
    and is used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters

    basename_out -- output name without extension (defaults to basename_in)
    use_cache    -- if True, memoise the conversion keyed on the .asc file
    verbose      -- print progress information
    """

    kwargs = {'basename_out': basename_out, 'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
                       dependencies = [basename_in + '.asc'],
                       verbose = verbose)

    else:
        #Direct keyword call instead of the deprecated apply() builtin
        result = _convert_dem_from_ascii2netcdf(basename_in, **kwargs)

    return result
1962
1963   
1964   
1965
1966
1967
1968def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1969                                  verbose = False):
1970    """Read Digitial Elevation model from the following ASCII format (.asc)
1971
1972    Internal function. See public function convert_dem_from_ascii2netcdf for details.
1973    """
1974
1975    import os
1976    from Scientific.IO.NetCDF import NetCDFFile
1977    from Numeric import Float, array
1978
1979    #root, ext = os.path.splitext(basename_in)
1980    root = basename_in
1981
1982    ###########################################
1983    # Read Meta data
1984    if verbose: print 'Reading METADATA from %s' %root + '.prj'
1985    metadatafile = open(root + '.prj')
1986    metalines = metadatafile.readlines()
1987    metadatafile.close()
1988
1989    L = metalines[0].strip().split()
1990    assert L[0].strip().lower() == 'projection'
1991    projection = L[1].strip()                   #TEXT
1992
1993    L = metalines[1].strip().split()
1994    assert L[0].strip().lower() == 'zone'
1995    zone = int(L[1].strip())
1996
1997    L = metalines[2].strip().split()
1998    assert L[0].strip().lower() == 'datum'
1999    datum = L[1].strip()                        #TEXT
2000
2001    L = metalines[3].strip().split()
2002    assert L[0].strip().lower() == 'zunits'     #IGNORE
2003    zunits = L[1].strip()                       #TEXT
2004
2005    L = metalines[4].strip().split()
2006    assert L[0].strip().lower() == 'units'
2007    units = L[1].strip()                        #TEXT
2008
2009    L = metalines[5].strip().split()
2010    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2011    spheroid = L[1].strip()                     #TEXT
2012
2013    L = metalines[6].strip().split()
2014    assert L[0].strip().lower() == 'xshift'
2015    false_easting = float(L[1].strip())
2016
2017    L = metalines[7].strip().split()
2018    assert L[0].strip().lower() == 'yshift'
2019    false_northing = float(L[1].strip())
2020
2021    #print false_easting, false_northing, zone, datum
2022
2023
2024    ###########################################
2025    #Read DEM data
2026
2027    datafile = open(basename_in + '.asc')
2028
2029    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2030    lines = datafile.readlines()
2031    datafile.close()
2032
2033    if verbose: print 'Got', len(lines), ' lines'
2034
2035    ncols = int(lines[0].split()[1].strip())
2036    nrows = int(lines[1].split()[1].strip())
2037    xllcorner = float(lines[2].split()[1].strip())
2038    yllcorner = float(lines[3].split()[1].strip())
2039    cellsize = float(lines[4].split()[1].strip())
2040    NODATA_value = int(lines[5].split()[1].strip())
2041
2042    assert len(lines) == nrows + 6
2043
2044
2045    ##########################################
2046
2047
2048    if basename_out == None:
2049        netcdfname = root + '.dem'
2050    else:
2051        netcdfname = basename_out + '.dem'
2052
2053    if verbose: print 'Store to NetCDF file %s' %netcdfname
2054    # NetCDF file definition
2055    fid = NetCDFFile(netcdfname, 'w')
2056
2057    #Create new file
2058    fid.institution = 'Geoscience Australia'
2059    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2060                      'of spatial point data'
2061
2062    fid.ncols = ncols
2063    fid.nrows = nrows
2064    fid.xllcorner = xllcorner
2065    fid.yllcorner = yllcorner
2066    fid.cellsize = cellsize
2067    fid.NODATA_value = NODATA_value
2068
2069    fid.zone = zone
2070    fid.false_easting = false_easting
2071    fid.false_northing = false_northing
2072    fid.projection = projection
2073    fid.datum = datum
2074    fid.units = units
2075
2076
2077    # dimension definitions
2078    fid.createDimension('number_of_rows', nrows)
2079    fid.createDimension('number_of_columns', ncols)
2080
2081    # variable definitions
2082    fid.createVariable('elevation', Float, ('number_of_rows',
2083                                            'number_of_columns'))
2084
2085    # Get handles to the variables
2086    elevation = fid.variables['elevation']
2087
2088    #Store data
2089    n = len(lines[6:])
2090    for i, line in enumerate(lines[6:]):
2091        fields = line.split()
2092        if verbose and i%((n+10)/10)==0:
2093            print 'Processing row %d of %d' %(i, nrows)
2094
2095        elevation[i, :] = array([float(x) for x in fields])
2096
2097    fid.close()
2098
2099
2100
2101
2102
2103def ferret2sww(basename_in, basename_out = None,
2104               verbose = False,
2105               minlat = None, maxlat = None,
2106               minlon = None, maxlon = None,
2107               mint = None, maxt = None, mean_stage = 0,
2108               origin = None, zscale = 1,
2109               fail_on_NaN = True,
2110               NaN_filler = 0,
2111               elevation = None,
2112               inverted_bathymetry = False
2113               ): #FIXME: Bathymetry should be obtained
2114                                  #from MOST somehow.
2115                                  #Alternatively from elsewhere
2116                                  #or, as a last resort,
2117                                  #specified here.
2118                                  #The value of -100 will work
2119                                  #for the Wollongong tsunami
2120                                  #scenario but is very hacky
2121    """Convert 'Ferret' NetCDF format for wave propagation to
2122    sww format native to pyvolution.
2123
2124    Specify only basename_in and read files of the form
2125    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2126    relative height, x-velocity and y-velocity, respectively.
2127
2128    Also convert latitude and longitude to UTM. All coordinates are
2129    assumed to be given in the GDA94 datum.
2130
2131    min's and max's: If omitted - full extend is used.
2132    To include a value min may equal it, while max must exceed it.
2133    Lat and lon are assuemd to be in decimal degrees
2134
2135    origin is a 3-tuple with geo referenced
2136    UTM coordinates (zone, easting, northing)
2137
2138    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2139    which means that longitude is the fastest
2140    varying dimension (row major order, so to speak)
2141
2142    ferret2sww uses grid points as vertices in a triangular grid
2143    counting vertices from lower left corner upwards, then right
2144    """
2145
2146    import os
2147    from Scientific.IO.NetCDF import NetCDFFile
2148    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2149    from Numeric import allclose, around
2150
2151    precision = Float
2152
2153    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2154
2155    if minlat != None:
2156        assert -90 < minlat < 90 , msg
2157    if maxlat != None:
2158        assert -90 < maxlat < 90 , msg
2159    if minlon != None:
2160        assert -180 < minlon < 180 , msg
2161    if maxlon != None:
2162        assert -180 < maxlon < 180 , msg
2163 
2164
2165    #Get NetCDF data
2166    if verbose: print 'Reading files %s_*.nc' %basename_in
2167    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2168    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2169    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2170    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
2171
2172    if basename_out is None:
2173        swwname = basename_in + '.sww'
2174    else:
2175        swwname = basename_out + '.sww'
2176
2177    times = file_h.variables['TIME']
2178    latitudes = file_h.variables['LAT']
2179    longitudes = file_h.variables['LON']
2180
2181
2182
2183    #Precision used by most for lat/lon is 4 or 5 decimals
2184    e_lat = around(file_e.variables['LAT'][:], 5)
2185    e_lon = around(file_e.variables['LON'][:], 5)
2186
2187    #Check that files are compatible
2188    assert allclose(latitudes, file_u.variables['LAT'])
2189    assert allclose(latitudes, file_v.variables['LAT'])
2190    assert allclose(latitudes, e_lat)
2191
2192    assert allclose(longitudes, file_u.variables['LON'])
2193    assert allclose(longitudes, file_v.variables['LON'])
2194    assert allclose(longitudes, e_lon)
2195
2196
2197
2198    if mint == None:
2199        jmin = 0
2200    else:
2201        jmin = searchsorted(times, mint)
2202
2203    if maxt == None:
2204        jmax=len(times)
2205    else:
2206        jmax = searchsorted(times, maxt)
2207
2208    if minlat == None:
2209        kmin=0
2210    else:
2211        kmin = searchsorted(latitudes, minlat)
2212
2213    if maxlat == None:
2214        kmax = len(latitudes)
2215    else:
2216        kmax = searchsorted(latitudes, maxlat)
2217
2218    if minlon == None:
2219        lmin=0
2220    else:
2221        lmin = searchsorted(longitudes, minlon)
2222
2223    if maxlon == None:
2224        lmax = len(longitudes)
2225    else:
2226        lmax = searchsorted(longitudes, maxlon)
2227
2228
2229
2230    times = times[jmin:jmax]
2231    latitudes = latitudes[kmin:kmax]
2232    longitudes = longitudes[lmin:lmax]
2233
2234
2235    if verbose: print 'cropping'
2236    zname = 'ELEVATION'
2237
2238
2239    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2240    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2241    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2242    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2243
2244    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2245    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2246    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2247    #        from Numeric import asarray
2248    #        elevations=elevations.tolist()
2249    #        elevations.reverse()
2250    #        elevations=asarray(elevations)
2251    #    else:
2252    #        from Numeric import asarray
2253    #        elevations=elevations.tolist()
2254    #        elevations.reverse()
2255    #        elevations=asarray(elevations)
2256    #        'print hmmm'
2257
2258
2259
2260    #Get missing values
2261    nan_ha = file_h.variables['HA'].missing_value[0]
2262    nan_ua = file_u.variables['UA'].missing_value[0]
2263    nan_va = file_v.variables['VA'].missing_value[0]
2264    if hasattr(file_e.variables[zname],'missing_value'):
2265        nan_e  = file_e.variables[zname].missing_value[0]
2266    else:
2267        nan_e = None
2268
2269    #Cleanup
2270    from Numeric import sometrue
2271
2272    missing = (amplitudes == nan_ha)
2273    if sometrue (missing):
2274        if fail_on_NaN:
2275            msg = 'NetCDFFile %s contains missing values'\
2276                  %(basename_in+'_ha.nc')
2277            raise msg
2278        else:
2279            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2280
2281    missing = (uspeed == nan_ua)
2282    if sometrue (missing):
2283        if fail_on_NaN:
2284            msg = 'NetCDFFile %s contains missing values'\
2285                  %(basename_in+'_ua.nc')
2286            raise msg
2287        else:
2288            uspeed = uspeed*(missing==0) + missing*NaN_filler
2289
2290    missing = (vspeed == nan_va)
2291    if sometrue (missing):
2292        if fail_on_NaN:
2293            msg = 'NetCDFFile %s contains missing values'\
2294                  %(basename_in+'_va.nc')
2295            raise msg
2296        else:
2297            vspeed = vspeed*(missing==0) + missing*NaN_filler
2298
2299
2300    missing = (elevations == nan_e)
2301    if sometrue (missing):
2302        if fail_on_NaN:
2303            msg = 'NetCDFFile %s contains missing values'\
2304                  %(basename_in+'_e.nc')
2305            raise msg
2306        else:
2307            elevations = elevations*(missing==0) + missing*NaN_filler
2308
2309    #######
2310
2311
2312
2313    number_of_times = times.shape[0]
2314    number_of_latitudes = latitudes.shape[0]
2315    number_of_longitudes = longitudes.shape[0]
2316
2317    assert amplitudes.shape[0] == number_of_times
2318    assert amplitudes.shape[1] == number_of_latitudes
2319    assert amplitudes.shape[2] == number_of_longitudes
2320
2321    if verbose:
2322        print '------------------------------------------------'
2323        print 'Statistics:'
2324        print '  Extent (lat/lon):'
2325        print '    lat in [%f, %f], len(lat) == %d'\
2326              %(min(latitudes.flat), max(latitudes.flat),
2327                len(latitudes.flat))
2328        print '    lon in [%f, %f], len(lon) == %d'\
2329              %(min(longitudes.flat), max(longitudes.flat),
2330                len(longitudes.flat))
2331        print '    t in [%f, %f], len(t) == %d'\
2332              %(min(times.flat), max(times.flat), len(times.flat))
2333
2334        q = amplitudes.flat
2335        name = 'Amplitudes (ha) [cm]'
2336        print %s in [%f, %f]' %(name, min(q), max(q))
2337
2338        q = uspeed.flat
2339        name = 'Speeds (ua) [cm/s]'
2340        print %s in [%f, %f]' %(name, min(q), max(q))
2341
2342        q = vspeed.flat
2343        name = 'Speeds (va) [cm/s]'
2344        print %s in [%f, %f]' %(name, min(q), max(q))
2345
2346        q = elevations.flat
2347        name = 'Elevations (e) [m]'
2348        print %s in [%f, %f]' %(name, min(q), max(q))
2349
2350
2351    #print number_of_latitudes, number_of_longitudes
2352    number_of_points = number_of_latitudes*number_of_longitudes
2353    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2354
2355
2356    file_h.close()
2357    file_u.close()
2358    file_v.close()
2359    file_e.close()
2360
2361
2362    # NetCDF file definition
2363    outfile = NetCDFFile(swwname, 'w')
2364
2365    #Create new file
2366    outfile.institution = 'Geoscience Australia'
2367    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2368                          %(basename_in + '_ha.nc',
2369                            basename_in + '_ua.nc',
2370                            basename_in + '_va.nc',
2371                            basename_in + '_e.nc')
2372
2373
2374    #For sww compatibility
2375    outfile.smoothing = 'Yes'
2376    outfile.order = 1
2377
2378    #Start time in seconds since the epoch (midnight 1/1/1970)
2379    outfile.starttime = starttime = times[0]
2380    times = times - starttime  #Store relative times
2381
2382    # dimension definitions
2383    outfile.createDimension('number_of_volumes', number_of_volumes)
2384
2385    outfile.createDimension('number_of_vertices', 3)
2386    outfile.createDimension('number_of_points', number_of_points)
2387
2388
2389    #outfile.createDimension('number_of_timesteps', len(times))
2390    outfile.createDimension('number_of_timesteps', len(times))
2391
2392    # variable definitions
2393    outfile.createVariable('x', precision, ('number_of_points',))
2394    outfile.createVariable('y', precision, ('number_of_points',))
2395    outfile.createVariable('elevation', precision, ('number_of_points',))
2396
2397    #FIXME: Backwards compatibility
2398    outfile.createVariable('z', precision, ('number_of_points',))
2399    #################################
2400
2401    outfile.createVariable('volumes', Int, ('number_of_volumes',
2402                                            'number_of_vertices'))
2403
2404    outfile.createVariable('time', precision,
2405                           ('number_of_timesteps',))
2406
2407    outfile.createVariable('stage', precision,
2408                           ('number_of_timesteps',
2409                            'number_of_points'))
2410
2411    outfile.createVariable('xmomentum', precision,
2412                           ('number_of_timesteps',
2413                            'number_of_points'))
2414
2415    outfile.createVariable('ymomentum', precision,
2416                           ('number_of_timesteps',
2417                            'number_of_points'))
2418
2419
2420    #Store
2421    from coordinate_transforms.redfearn import redfearn
2422    x = zeros(number_of_points, Float)  #Easting
2423    y = zeros(number_of_points, Float)  #Northing
2424
2425
2426    if verbose: print 'Making triangular grid'
2427    #Check zone boundaries
2428    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2429
2430    vertices = {}
2431    i = 0
2432    for k, lat in enumerate(latitudes):       #Y direction
2433        for l, lon in enumerate(longitudes):  #X direction
2434
2435            vertices[l,k] = i
2436
2437            zone, easting, northing = redfearn(lat,lon)
2438
2439            msg = 'Zone boundary crossed at longitude =', lon
2440            #assert zone == refzone, msg
2441            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2442            x[i] = easting
2443            y[i] = northing
2444            i += 1
2445
2446
2447    #Construct 2 triangles per 'rectangular' element
2448    volumes = []
2449    for l in range(number_of_longitudes-1):    #X direction
2450        for k in range(number_of_latitudes-1): #Y direction
2451            v1 = vertices[l,k+1]
2452            v2 = vertices[l,k]
2453            v3 = vertices[l+1,k+1]
2454            v4 = vertices[l+1,k]
2455
2456            volumes.append([v1,v2,v3]) #Upper element
2457            volumes.append([v4,v3,v2]) #Lower element
2458
2459    volumes = array(volumes)
2460
2461    if origin == None:
2462        zone = refzone
2463        xllcorner = min(x)
2464        yllcorner = min(y)
2465    else:
2466        zone = origin[0]
2467        xllcorner = origin[1]
2468        yllcorner = origin[2]
2469
2470
2471    outfile.xllcorner = xllcorner
2472    outfile.yllcorner = yllcorner
2473    outfile.zone = zone
2474
2475
2476    if elevation is not None:
2477        z = elevation
2478    else:
2479        if inverted_bathymetry:
2480            z = -1*elevations
2481        else:
2482            z = elevations
2483        #FIXME: z should be obtained from MOST and passed in here
2484
2485    from Numeric import resize
2486    z = resize(z,outfile.variables['z'][:].shape)
2487    outfile.variables['x'][:] = x - xllcorner
2488    outfile.variables['y'][:] = y - yllcorner
2489    outfile.variables['z'][:] = z             #FIXME HACK
2490    outfile.variables['elevation'][:] = z
2491    outfile.variables['time'][:] = times   #Store time relative
2492    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2493
2494
2495
2496    #Time stepping
2497    stage = outfile.variables['stage']
2498    xmomentum = outfile.variables['xmomentum']
2499    ymomentum = outfile.variables['ymomentum']
2500
2501    if verbose: print 'Converting quantities'
2502    n = len(times)
2503    for j in range(n):
2504        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2505        i = 0
2506        for k in range(number_of_latitudes):      #Y direction
2507            for l in range(number_of_longitudes): #X direction
2508                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2509                stage[j,i] = w
2510                h = w - z[i]
2511                xmomentum[j,i] = uspeed[j,k,l]/100*h
2512                ymomentum[j,i] = vspeed[j,k,l]/100*h
2513                i += 1
2514
2515    #outfile.close()
2516
2517    #FIXME: Refactor using code from file_function.statistics
2518    #Something like print swwstats(swwname)
2519    if verbose:
2520        x = outfile.variables['x'][:]
2521        y = outfile.variables['y'][:]
2522        times = outfile.variables['time'][:]
2523        print '------------------------------------------------'
2524        print 'Statistics of output file:'
2525        print '  Name: %s' %swwname
2526        print '  Reference:'
2527        print '    Lower left corner: [%f, %f]'\
2528              %(xllcorner, yllcorner)
2529        print '    Start time: %f' %starttime
2530        print '  Extent:'
2531        print '    x [m] in [%f, %f], len(x) == %d'\
2532              %(min(x.flat), max(x.flat), len(x.flat))
2533        print '    y [m] in [%f, %f], len(y) == %d'\
2534              %(min(y.flat), max(y.flat), len(y.flat))
2535        print '    t [s] in [%f, %f], len(t) == %d'\
2536              %(min(times), max(times), len(times))
2537        print '  Quantities [SI units]:'
2538        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2539            q = outfile.variables[name][:].flat
2540            print '    %s in [%f, %f]' %(name, min(q), max(q))
2541
2542
2543
2544    outfile.close()
2545
2546
2547
2548
2549
def timefile2netcdf(filename, quantity_names = None):
    """Template for converting typical text files with time series to
    NetCDF tms file.

    The file format is assumed to be two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname with extensions .txt and .tms

    quantity_names is an optional sequence of names for the stored
    attributes; attribute i falls back to 'Attribute<i>' if no name is
    available.

    Raises (string exception) if the first field cannot be parsed with
    config.time_format, and asserts that times increase monotonically.
    """

    import time, calendar
    from Numeric import array, zeros, Float, alltrue
    from config import time_format
    from utilities.numerical_tools import ensure_numeric


    #Read the file once; the first line determines the number of attributes
    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    fields = lines[0].split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg

    try:
        starttime = calendar.timegm(time.strptime(fields[0], time_format))
    except ValueError:
        msg = 'First field in file %s must be' %filename
        msg += ' date-time with format %s.\n' %time_format
        msg += 'I got %s instead.' %fields[0]
        raise msg


    #Split values of the first line to establish the number of attributes
    values = [float(value) for value in fields[1].split()]

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg


    #Read times proper
    N = len(lines)
    d = len(q)

    T = zeros(N, Float)       #Time relative to starttime
    Q = zeros((N, d), Float)  #Attribute values

    for i, line in enumerate(lines):
        fields = line.split(',')
        realtime = calendar.timegm(time.strptime(fields[0], time_format))

        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonically ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg


    #Create NetCDF file
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(filename + '.tms', 'w')


    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'


    #Reference point
    #Start time in seconds since the epoch (midnight 1/1/1970)
    #FIXME: Use Georef
    fid.starttime = starttime


    # dimension definitions
    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', Float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        #Use the supplied name where available; quantity_names may be
        #None (TypeError) or shorter than needed (IndexError)
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            name = 'Attribute%d'%i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()
2671
2672
def extent_sww(file_name):
    """
    Read an sww file and return its spatial extent and stage range.

    Input;
    file_name - the sww file

    Output;
    A list [xmin, xmax, ymin, ymax, stage_min, stage_max] where
    stage_min/stage_max are taken over all timesteps and vertices.
    """

    from Scientific.IO.NetCDF import NetCDFFile

    #Open existing NetCDF file for read
    fid = NetCDFFile(file_name, 'r')

    # Pull out the variables needed for the extent computation
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    stage = fid.variables['stage'][:]

    fid.close()

    #stage is 2D (time x points) so flatten before taking min/max
    return [min(x), max(x),
            min(y), max(y),
            min(stage.flat), max(stage.flat)]
2707
2708
def sww2domain(filename,boundary=None,t=None,\
               fail_if_NaN=True,NaN_filler=0\
               ,verbose = False,very_verbose = False):
    """
    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    Reconstruct a shallow_water Domain object from an sww file,
    interpolating all conserved quantities to model time t.

    boundary     - optional boundary dictionary to attach to the domain
    t            - model time to extract (defaults to last time in file)
    fail_if_NaN  - if True, raise when a quantity contains missing values;
                   otherwise replace them with NaN_filler
    verbose, very_verbose - progress/diagnostic printing

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatable with the coordinates, and will
    give a different final boundary, or crash.
    """
    NaN=9.969209968386869e+036
    #initialise NaN.
    #Default missing-value marker; may be overridden per-quantity below
    #by the variable's own missing_value attribute.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
    time = fid.variables['time']       #Timesteps
    if t is None:
        t = time[-1]
    #(index, ratio) pair used to interpolate quantities to time t
    time_interp = get_time_interp(time,t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]             #x-coordinates of vertices
    y = fid.variables['y'][:]             #y-coordinates of vertices
    elevation = fid.variables['elevation']     #Elevation
    stage = fid.variables['stage']     #Water level
    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    coordinates=transpose(asarray([x.tolist(),y.tolist()]))

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    if verbose: print '    getting quantities'
    #Classify variables: time-dependent ones are conserved quantities
    #(interpolated to t), the rest are static 'other' quantities.
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity]=\
                  interpolated_quantity(fid.variables[quantity][:],time_interp)
        else: other_quantities.append(quantity)

    #Coordinate/connectivity arrays are not quantities
    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    #   From shallow_water.Domain:
    coordinates=coordinates.tolist()
    volumes=volumes.tolist()
    #FIXME:should this be in mesh?(peter row)
    #Smoothed (shared-vertex) files need duplicate points weeded out
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)


    try:
        domain = Domain(coordinates, volumes, boundary)
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
        raise msg

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    #Model time restarts at 0; file start time plus offset t becomes starttime
    domain.starttime=float(starttime)+float(t)
    domain.time=0.0

    #Set static quantities, replacing or rejecting missing values
    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Expand flat per-vertex data to (triangle, 3) layout
            X = resize(X,(len(X)/3,3))
        domain.set_quantity(quantity,X)
    #
    #Set conserved quantities from the time-interpolated values
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            X = resize(X,(X.shape[0]/3,3))
        domain.set_quantity(quantity,X)

    fid.close()
    return domain
2857
def interpolated_quantity(saved_quantity,time_interp):
    """Interpolate a saved quantity in time.

    time_interp is an (index, ratio) pair as produced by get_time_interp.
    Returns the vector of values at the interpolated time.
    """

    index, ratio = time_interp

    if ratio > 0:
        #Linear blend of the two bracketing timesteps
        return (1 - ratio)*saved_quantity[index] + ratio*saved_quantity[index + 1]

    #Exact timestep match - no interpolation needed
    return saved_quantity[index]
2869
def get_time_interp(time,t=None):
    """Find the (index, ratio) pair for interpolating to model time t.

    time is a monotonically increasing sequence of times; t defaults to
    the last time in the sequence. Raises (string exception) if t falls
    outside the interval covered by time.
    It is borrowed from previous pyvolution code.
    """
    if t is None:
        #Default to the final timestep - no interpolation required
        return (-1, 0.)

    #Reject times outside the file's interval
    msg = 'Time interval derived from file %s [%s:%s]'\
        %('FIXMEfilename', time[0], time[-1])
    msg += ' does not match model time: %s' %t
    if t < time[0]: raise msg
    if t > time[-1]: raise msg

    #Walk forwards past t, then back to the bracketing lower index
    index = 0
    while t > time[index]: index += 1
    while t < time[index]: index -= 1

    if t == time[index]:
        #Protect against case where t == time[-1] (last time)
        # - also works in general when t == time[i]
        ratio = 0
    else:
        #t is now between index and index+1
        ratio = (t - time[index])/(time[index+1] - time[index])

    return (index, ratio)
2896
2897
def weed(coordinates,volumes,boundary = None):
    """Remove duplicated points and remap volumes (and optionally
    boundary segments) onto the weeded point indices.

    coordinates -- list (or array) of point coordinate pairs
    volumes     -- list (or array) of index triples into coordinates;
                   mutated in place when given as a list
    boundary    -- optional dict mapping (index, index) segments to labels

    Returns (coordinates, volumes, boundary) with duplicates removed.
    """
    #FIX: the original compared type(...) to the *string* 'array', which
    #is always false, so arrays were never converted.  Convert anything
    #exposing a tolist method (Numeric/numpy arrays) to plain lists.
    if hasattr(coordinates, 'tolist'):
        coordinates = coordinates.tolist()
    if hasattr(volumes, 'tolist'):
        volumes = volumes.tolist()

    unique = False
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point in point_dict:
            unique = True
            same_point[i] = point
            #to change all point i references to point j
        else:
            point_dict[point] = i
            same_point[i] = point

    #Rebuild the coordinate list from the unique points and renumber
    #point_dict so it maps point -> new (weeded) index
    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    #Remap every volume vertex to its weeded index (negative indices
    #are sentinels and left untouched)
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    new_boundary = {}
    if boundary is not None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)

            if (point0, point1) in new_boundary:
                new_boundary[(point0, point1)] = new_boundary[(point0, point1)]#\
                                              #+','+label
            elif (point1, point0) in new_boundary:
                new_boundary[(point1, point0)] = new_boundary[(point1, point0)]#\
                                              #+','+label
            else:
                new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
2953
2954
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.

    basename_in  -- input file name without the '.dem' extension
    stencil      -- 2D weight array; each decimated cell is the weighted
                    sum of a stencil-sized window of the original grid
                    (windows containing any NODATA value become NODATA)
    cellsize_new -- cell size of the decimated grid; assumed to be an
                    integer multiple of the input cellsize (TODO confirm)
    basename_out -- optional output name without extension; defaults to
                    '<basename_in>_<cellsize_new>.dem'
    verbose      -- print progress information
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata (stored as length-1 arrays in the NetCDF attributes)
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out == None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    #NOTE: Python 2 integer division; offsets locate the stencil centre
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                           'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    #Shift the lower-left corner to the centre of the first stencil window
    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    #View the flat elevation variable as a 2D grid
    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data, one decimated row at a time
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev =  zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        #Write the completed row back into the flat output variable
        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
3081
3082
3083
3084
3085def tsh2sww(filename, verbose=False): #test_tsh2sww
3086    """
3087    to check if a tsh/msh file 'looks' good.
3088    """
3089
3090    #FIXME Move to data_manager
3091    from shallow_water import Domain
3092    from pmesh2domain import pmesh_to_domain_instance
3093    import time, os
3094    from data_manager import get_dataobject
3095    from os import sep, path
3096    from utilities.numerical_tools import mean
3097
3098    if verbose == True:print 'Creating domain from', filename
3099    domain = pmesh_to_domain_instance(filename, Domain)
3100    if verbose == True:print "Number of triangles = ", len(domain)
3101
3102    domain.smooth = True
3103    domain.format = 'sww'   #Native netcdf visualisation format
3104    file_path, filename = path.split(filename)
3105    filename, ext = path.splitext(filename)
3106    domain.filename = filename
3107    domain.reduction = mean
3108    if verbose == True:print "file_path",file_path
3109    if file_path == "":file_path = "."
3110    domain.set_datadir(file_path)
3111
3112    if verbose == True:
3113        print "Output written to " + domain.get_datadir() + sep + \
3114              domain.filename + "." + domain.format
3115    sww = get_dataobject(domain)
3116    sww.store_connectivity()
3117    sww.store_timestep('stage')
3118
3119
def asc_csiro2sww(bath_dir,
                  elevation_dir,
                  ucur_dir,
                  vcur_dir,
                  sww_file,
                  minlat = None, maxlat = None,
                  minlon = None, maxlon = None,
                  zscale=1,
                  mean_stage = 0,
                  fail_on_NaN = True,
                  elevation_NaN_filler = 0,
                  bath_prefix='ba',
                  elevation_prefix='el',
                  verbose=False):
    """
    Produce an sww boundary file, from esri ascii data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    assume:
    All files are in esri ascii format

    4 types of information
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions
    The metadata of all the files is the same
    Each type is in a seperate directory
    One bath file with extention .000
    The time period is less than 24hrs and uniform.

    bath_dir/elevation_dir/ucur_dir/vcur_dir -- directories holding the
        four data types, one esri ascii file per timestep
    sww_file             -- name of the NetCDF sww file to create
    minlat..maxlon       -- optional lat/lon clipping window
    zscale, mean_stage   -- stage is computed as zscale*elevation + mean_stage
    fail_on_NaN          -- raise on missing values in elevation files,
                            otherwise substitute elevation_NaN_filler
    bath_prefix/elevation_prefix -- file name prefixes per data type
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from coordinate_transforms.redfearn import redfearn

    precision = Float # So if we want to change the precision its done here

    # go in to the bath dir and load the only file,
    bath_files = os.listdir(bath_dir)
    #print "bath_files",bath_files

    #fixme: make more general?
    bath_file = bath_files[0]
    bath_dir_file =  bath_dir + os.sep + bath_file
    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
    #print "bath_metadata",bath_metadata
    #print "bath_grid",bath_grid

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    #(the last 12 characters of the file name are assumed to carry it)
    base_start = bath_file[-12:]

    #go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
                         + base_start
    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
    #print "elevation_dir_file",elevation_dir_file
    #print "elevation_grid", elevation_grid

    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)

    # the first elevation file should be the
    # file with the same base name as the bath data
    #print "elevation_files[0]",elevation_files[0]
    #print "'el' + base_start",'el' + base_start
    assert elevation_files[0] == 'el' + base_start

    #assert bath_metadata == elevation_metadata



    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    #number_of_times = len(os.listdir(elevation_dir))
    #number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    #Reconstruct the lat/lon axes from the grid corner and cell size
    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
                  for x in range(number_of_longitudes)]
    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
                 for y in range(number_of_latitudes)]

    # reverse order of lat, so the first lat represents the first grid row
    latitudes.reverse()

    #print "latitudes - before _get_min_max_indexes",latitudes
    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
                                                 minlat=minlat, maxlat=maxlat,
                                                 minlon=minlon, maxlon=maxlon)


    #Clip grid and axes to the requested window
    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
    #print "bath_grid",bath_grid
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
    #print "number_of_points",number_of_points

    #Work out the times
    if len(elevation_files) > 1:
        # Assume: The time period is less than 24hrs.
        # (file extensions encode hours; convert the step to seconds)
        time_period = (int(elevation_files[1][-3:]) - \
                      int(elevation_files[0][-3:]))*60*60
        times = [x*time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]
    #print "times", times
    #print "number_of_latitudes", number_of_latitudes
    #print "number_of_longitudes", number_of_longitudes
    #print "number_of_times", number_of_times
    #print "latitudes", latitudes
    #print "longitudes", longitudes


    if verbose:
        print '------------------------------------------------'
        print 'Statistics:'
        print '  Extent (lat/lon):'
        print '    lat in [%f, %f], len(lat) == %d'\
              %(min(latitudes), max(latitudes),
                len(latitudes))
        print '    lon in [%f, %f], len(lon) == %d'\
              %(min(longitudes), max(longitudes),
                len(longitudes))
        print '    t in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))

    ######### WRITE THE SWW FILE #############
    # NetCDF file definition
    outfile = NetCDFFile(sww_file, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'


    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    #Start time in seconds since the epoch (midnight 1/1/1970)
    outfile.starttime = starttime = times[0]


    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)

    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    #Store
    from coordinate_transforms.redfearn import redfearn
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print 'Making triangular grid'
    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0],longitudes[0])

    #Convert every lat/lon node to UTM, remembering its linear index
    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat,lon)

            msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1


    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            #Note, this is different to the ferrit2sww code
            #since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) #Upper element
            volumes.append([v4,v2,v3]) #Lower element

    volumes = array(volumes)

    #Georeference the output against the minimum easting/northing
    geo_ref = Geo_reference(refzone,min(x),min(y))
    geo_ref.write_NetCDF(outfile)

    # This will put the geo ref in the middle
    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)


    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (/lon):'
        print '    x in [%f, %f], len(lat) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(lon) == %d'\
              %(min(y), max(y),
                len(y))
        print 'geo_ref: ',geo_ref

    z = resize(bath_grid,outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

    # do this to create an ok sww file.
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z
    # put the next line up in the code after outfile.order = 1
    #number_of_times = 1

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
           _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])

        #print "elevation_grid",elevation_grid
        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
        #print "elevation_grid",elevation_grid
        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue (missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                raise msg
            else:
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler


        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        #Stage w = zscale*elevation + mean_stage; momentum = velocity*depth
        #(assumes the ucur/vcur grids hold velocities — TODO confirm)
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()
3427
3428def _get_min_max_indexes(latitudes,longitudes,
3429                        minlat=None, maxlat=None,
3430                        minlon=None, maxlon=None):
3431    """
3432    return max, min indexes (for slicing) of the lat and long arrays to cover the area
3433    specified with min/max lat/long
3434
3435    Think of the latitudes and longitudes describing a 2d surface.
3436    The area returned is, if possible, just big enough to cover the
3437    inputed max/min area. (This will not be possible if the max/min area
3438    has a section outside of the latitudes/longitudes area.)
3439
3440    assume latitudess & longitudes are sorted,
3441    long - from low to high (west to east, eg 148 - 151)
3442    lat - from high to low (north to south, eg -35 - -38)
3443    """
3444
3445    # reverse order of lat, so it's in ascending order
3446    latitudes.reverse()
3447    largest_lat_index = len(latitudes)-1
3448    #Cut out a smaller extent.
3449    if minlat == None:
3450        lat_min_index = 0
3451    else:
3452        lat_min_index = searchsorted(latitudes, minlat)-1
3453        if lat_min_index <0:
3454            lat_min_index = 0
3455
3456
3457    if maxlat == None:
3458        lat_max_index = largest_lat_index #len(latitudes)
3459    else:
3460        lat_max_index = searchsorted(latitudes, maxlat)
3461        if lat_max_index > largest_lat_index:
3462            lat_max_index = largest_lat_index
3463
3464    if minlon == None:
3465        lon_min_index = 0
3466    else:
3467        lon_min_index = searchsorted(longitudes, minlon)-1
3468        if lon_min_index <0:
3469            lon_min_index = 0
3470
3471    if maxlon == None:
3472        lon_max_index = len(longitudes)
3473    else:
3474        lon_max_index = searchsorted(longitudes, maxlon)
3475
3476    #Take into account that the latitude list was reversed
3477    latitudes.reverse() # Python passes by reference, need to swap back
3478    lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3479                                   largest_lat_index - lat_min_index
3480    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3481    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3482
3483    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3484
3485
3486def _read_asc(filename, verbose=False):
3487    """Read esri file from the following ASCII format (.asc)
3488
3489    Example:
3490
3491    ncols         3121
3492    nrows         1800
3493    xllcorner     722000
3494    yllcorner     5893000
3495    cellsize      25
3496    NODATA_value  -9999
3497    138.3698 137.4194 136.5062 135.5558 ..........
3498
3499    """
3500
3501    datafile = open(filename)
3502
3503    if verbose: print 'Reading DEM from %s' %(filename)
3504    lines = datafile.readlines()
3505    datafile.close()
3506
3507    if verbose: print 'Got', len(lines), ' lines'
3508
3509    ncols = int(lines.pop(0).split()[1].strip())
3510    nrows = int(lines.pop(0).split()[1].strip())
3511    xllcorner = float(lines.pop(0).split()[1].strip())
3512    yllcorner = float(lines.pop(0).split()[1].strip())
3513    cellsize = float(lines.pop(0).split()[1].strip())
3514    NODATA_value = float(lines.pop(0).split()[1].strip())
3515
3516    assert len(lines) == nrows
3517
3518    #Store data
3519    grid = []
3520
3521    n = len(lines)
3522    for i, line in enumerate(lines):
3523        cells = line.split()
3524        assert len(cells) == ncols
3525        grid.append(array([float(x) for x in cells]))
3526    grid = array(grid)
3527
3528    return {'xllcorner':xllcorner,
3529            'yllcorner':yllcorner,
3530            'cellsize':cellsize,
3531            'NODATA_value':NODATA_value}, grid
Note: See TracBrowser for help on using the repository browser.