source: inundation/pyvolution/data_manager.py @ 2126

Last change on this file since 2126 was 2112, checked in by steve, 18 years ago

Extension to colouring stage

File size: 114.2 KB
Line 
1"""datamanager.py - input output for AnuGA
2
3
This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
.geo: Houdini's ASCII geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
NC -> SWW: Conversion of MOST boundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
TSH + Boundary SWW -> SWW: Simulation using pyvolution
48
49"""
50import os
51
52from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
53     searchsorted, zeros, allclose, around
54
55
56from coordinate_transforms.geo_reference import Geo_reference
57
def make_filename(s):
    """Return *s* converted into a string safe to use as a filename.

    Surrounding whitespace is stripped, spaces become underscores,
    parentheses are removed and double underscores are collapsed.
    """

    cleaned = s.strip().replace(' ', '_')
    for unwanted in ('(', ')'):
        cleaned = cleaned.replace(unwanted, '')
    return cleaned.replace('__', '_')
69
70
def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       checkdir(path, verbose):

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    # os.name is 'nt' on all Windows variants. The original compared
    # sys.platform against 'nt'/'dos', values it never takes.
    unix = os.name != 'nt'

    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path)  # Expand ~ or ~user in pathname

    # NOTE: the original used os.access(path, os.R_OK and os.W_OK),
    # which evaluates to just os.W_OK; combine the flags bitwise so
    # both read and write access are actually verified.
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            os.mkdir(path)

            # Change access rights if possible
            if unix:
                os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose:
                print('MESSAGE: Directory %s created.' % path)

        except OSError:
            # Fall back to a writable scratch directory
            print('WARNING: Directory %s could not be created.' % path)
            if unix:
                path = '/tmp/'
            else:
                path = 'C:' + os.sep  # include trailing separator as documented

            print('Using directory %s instead' % path)

    return path
123
124
125
def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for entry in os.listdir(path):
            X = os.path.join(path, entry)

            if os.path.isdir(X) and not os.path.islink(X):
                # Recurse only into real subdirectories; symlinks are
                # unlinked below rather than followed, so we never
                # escape 'path'.
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except OSError:
                    # Best effort: report and continue with remaining entries
                    print("Could not remove file %s" % X)

        os.rmdir(path)
146
147
148
def create_filename(datadir, filename, format, size=None, time=None):
    """Build a data file name inside datadir (creating it if necessary).

    Optional size and time are encoded into the name and format becomes
    the file extension, e.g. <datadir>/<filename>_size3_time1.50.sww
    """

    import os
    #from config import data_dir

    parts = [check_dir(datadir) + filename]

    if size is not None:
        parts.append('_size%d' % size)

    if time is not None:
        parts.append('_time%.2f' % time)

    parts.append('.' + format)
    return ''.join(parts)
164
165
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format

    Returns the list of paths matching the naming scheme used by
    create_filename for the given size and extension.
    """

    import glob
    import os

    directory = check_dir(datadir)  #Already ends with os.sep

    #The pattern must agree with create_filename, which writes
    #'_size%d' (no '='). The original pattern '_size=%d' plus a
    #duplicated os.sep never matched any generated file.
    pattern = directory + filename + '_size%d*.%s' % (size, format)
    return glob.glob(pattern)
179
180
181
#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        """Record common state and derive the output filename.

        domain    -- model domain; supplies data directory and name
        extension -- file extension identifying the concrete format
        mode      -- 'r' (read), 'w' (write) or 'a' (append)
        """
        msg = ('''Mode %s must be either:''' % mode +
               '''   'w' (write)''' +
               '''   'r' (read)''' +
               '''   'a' (append)''')
        assert mode in ('r', 'w', 'a'), msg

        #Filename derived from the domain's data directory and name
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain


        #FIXME: Should we have a general set_precision function?
211
#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        """Open (or create, for mode 'w') the .sww file for the domain.

        domain    -- model domain; supplies name, datadir, smooth flag,
                     starttime and optionally max_size and texture
        mode      -- 'r', 'w' or 'a' (validated in Data_format.__init__)
        max_size  -- maximum file size in bytes before output rolls over
                     into a new file (see store_timestep)
        recursion -- used internally when a follow-on file is created;
                     False for the first file, incremented per rollover
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            # With smoothing, vertices are shared between triangles;
            # without it every triangle stores its own three vertices.
            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        #Older readers expect elevation duplicated under the name 'z'
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file

        names -- a quantity name or list of names ('stage', 'xmomentum',
                 'ymomentum'); each is written at the next timestep index.

        If appending would push the file past max_size, output rolls
        over into a fresh Data_format_sww (with '_time_<t>' appended to
        the name), which replaces domain.writer for subsequent steps.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        #minimum_allowed_depth = 0.001
        minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        #Retry opening for up to ~10s in case a reader holds the file
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase =  file_size/i
        #Estimate size after this step; 2**recursion relaxes the limit
        #for follow-on files so each rollover file can grow further
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domian.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break swollens reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            #Accept a single name as well as a sequence of names
            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    #Where depth is below the minimum, clip stage to the
                    #bed elevation so dry areas plot at bed level
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()
482
483
484
#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)

    Stores mesh coordinates, connectivity and a timeseries of every
    quantity defined on the domain so a simulation can be resumed.
    """


    def __init__(self, domain, mode = 'w'):
        """Create ('w') or open the checkpoint file and, when creating,
        define dimensions plus one timeseries variable per quantity.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float  #original imported Float twice

        self.precision = Float #Use full precision

        #FIXME: extension is 'sww' although this class documents '.cpt';
        #kept unchanged so existing filenames remain valid.
        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity of the triangles
        to the checkpoint file.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)

        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #NOTE: the original also assigned z[:] here, but no variable
        #named z was ever fetched or created, so that line raised
        #NameError. Elevation is stored per timestep like any quantity.

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store current time and the named quantity to file.

        Retries opening for up to 10 seconds in case another process is
        reading the file; raises IOError if the file cannot be opened.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                          %self.filename
                msg += ' - trying again'
                print(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        #NOTE: originally this check sat inside the while loop and the
        #raise executed unconditionally on every iteration.
        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise IOError(msg)


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        #Write to the variable matching the requested quantity; the
        #original always wrote into 'stage' regardless of name even
        #though __init__ allocates one variable per quantity.
        var = fid.variables[name]
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision = self.precision)

        var[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()
630
631
632
633
634
#Function for storing xya output
#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Generic interface to data formats
    """


    def __init__(self, domain, mode = 'w'):
        """Set up an ASCII .xya writer for the given domain."""
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.

        Writes one '<x> <y> <bed elevation> <stage> <friction>' line
        per vertex, preceded by a header with the point count.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        #Point count depends on whether vertices are shared (smooth)
        #or duplicated per triangle
        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        #Header: <number of points> <number of attributes>
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        #map(None, ...) zips the columns (Python 2 idiom, pads with None)
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file

        Not implemented for the xya format (no-op).
        """
        pass
706
707
#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)

       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,
       e.g. the x coordinate of vertex 1 of element i is in x[i,1]
    """

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'

    #obj is a text format; the original opened in binary mode ('wb')
    outfile = open(FN, 'w')
    try:
        outfile.write("# Triangulation as an obj file\n")

        M, N = x.shape
        assert N == 3  #Assuming three vertices per element

        #Vertex records: one 'v x y z' line per vertex of every element
        for i in range(M):
            for j in range(N):
                outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

        #Face records: vertices were written element by element, so
        #element i uses the (1-based) vertex indices 3i+1 .. 3i+3
        for i in range(M):
            base = i*N
            outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))
    finally:
        #Guarantee the file is closed even if an assertion fires
        outfile.close()
746
747
748#########################################################
749#Conversion routines
750########################################################
751
def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj

    basefilename -- name stem of the .sww file (combined with size by
                    create_filename)
    size         -- number of triangles; also the number of rows read

    Writes one obj file for the bathymetry and one per timestep for
    the stage.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    #Points are laid out vertex-major (all first vertices, then all
    #second vertices, ...), hence the i+j*M indexing
    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        #NOTE(review): only the first 5 characters of basefilename are
        #used here -- presumably to keep names short; verify intent.
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)
804
805
def dat2obj(basefilename):
    """Convert line based data output to obj

    Reads <basefilename>_geometry.dat for the mesh (x, y, z triplets,
    three vertices per line) and every <basefilename>*_time=<t>.dat
    file for per-timestep values, writing one obj file for each.
    FIXME: Obsolete?
    """

    import glob, os
    from config import data_dir

    from Numeric import zeros, Float


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = list(map(float, tokens))

        #Each line holds three (x, y, z) vertex triplets
        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print('Processing %s' % filename)

        #glob already returns paths that include data_dir; the original
        #prepended data_dir a second time, producing invalid paths.
        lines = open(filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            #Malformed timestamp in filename (was a string exception)
            raise ValueError('Hmmmm')


        for i, line in enumerate(lines):
            tokens = line.split()
            values = list(map(float, tokens))

            #One z value per vertex on each line
            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
883
884
def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2

    All dimensions and non-timeseries variables are copied verbatim;
    'time' and 'stage' are subsampled to the selected timesteps.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    #Mirror every variable definition (same name, type and dimensions)
    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass  #time-dependent variables are subsampled below
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
932
933
#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of the Data_format subclass matching domain.format

    e.g. domain.format == 'sww' yields a Data_format_sww instance.
    Raises KeyError if no matching class exists in this module.
    """

    #Look the class up by name in this module instead of eval'ing an
    #arbitrarily constructed string (safer, same result for valid formats)
    cls = globals()['Data_format_%s' % domain.format]
    return cls(domain, mode)
941
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Open existing xya file for read
    infile = open(root + ext, 'r')

    if verbose: print('Reading xya points from %s' % (root + ext))

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        #Warn about unexpected field counts but still attempt the parse
        if len(fields) != 3:
            print('WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line))

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except (ValueError, IndexError):
            #Skip lines that are not three numeric values (e.g. headers)
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)

    #Release the input file handle (original never closed it)
    infile.close()

    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print('Store to NetCDF file %s' % ptsname)
    write_ptsfile(ptsname, points, attribute, attribute_name)
1017
1018
def write_ptsfile(ptsname, points, attribute, attribute_name = None):
    """Write points and associated attribute to pts (NetCDF) format

    ptsname        -- output file name
    points         -- sequence of (x, y) pairs
    attribute      -- one value per point
    attribute_name -- variable name for the values ('attribute' if None)
    """

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    fid = NetCDFFile(ptsname, 'w')

    #Create new file
    fid.institution = 'Geoscience Australia'
    fid.description = 'NetCDF pts format for compact and '\
                      'portable storage of spatial point data'

    #Georeferencing (default reference)
    from coordinate_transforms.geo_reference import Geo_reference
    Geo_reference().write_NetCDF(fid)

    #Dimensions: N points in 2d
    fid.createDimension('number_of_points', len(points))
    fid.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    fid.createVariable('points', Float, ('number_of_points',
                                         'number_of_dimensions'))
    fid.createVariable(attribute_name, Float, ('number_of_points',))

    #Store data through the variable handles
    fid.variables['points'][:, :] = points
    fid.variables[attribute_name][:] = attribute

    fid.close()
1063
1064
def dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    basename_in: name of input file (without the .dem extension)
    basename_out: name of output file (without the .pts extension).
                  Defaults to basename_in.
    verbose: if True, print progress information
    easting_min, easting_max, northing_min, northing_max:
                  optional absolute bounding box; only grid points inside
                  the box are exported. Each defaults to the corresponding
                  edge of the full DEM extent.

    Points are stored relative to (easting_min, northing_min), which
    become the georeference (xllcorner, yllcorner) of the pts file.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    #Grid meta data stored as length-1 array attributes on the file
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values (full DEM extent)
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing: lower left of the bounding box becomes the new origin
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows


    # dimension definitions
    # Number of points assumes every grid cell inside the box is exported
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
    outfile.createDimension('number_of_points', nrows_in_bounding_box*ncols_in_bounding_box)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    #FIXME: Could perhaps be faster using array operations (Fixed 27/7/05)
    global_index = 0
    for i in range(nrows):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index
        #Per-row buffers; written to the NetCDF variables in one slab below
        tpoints = zeros((ncols_in_bounding_box, 2), Float)
        telev =  zeros(ncols_in_bounding_box, Float)
        local_index = 0

        #Northing of this row; row 0 is the top (northernmost) row.
        #NOTE(review): top row maps to nrows*cellsize rather than the more
        #usual (nrows-1)*cellsize - confirm intended cell registration.
        y = (nrows-i)*cellsize + yllcorner
        for j in range(ncols):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                #Store coordinates relative to new origin
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        #Only write rows where every column fell inside the bounding box;
        #a partially covered row is silently dropped here (its points were
        #still counted in global_index, so the assert below would fail).
        if upper_index == lower_index + ncols_in_bounding_box:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nrows_in_bounding_box*ncols_in_bounding_box, 'index not equal to number of points'

    infile.close()
    outfile.close()
1208
1209
1210
1211def _read_hecras_cross_sections(lines):
1212    """Return block of surface lines for each cross section
1213    Starts with SURFACE LINE,
1214    Ends with END CROSS-SECTION
1215    """
1216
1217    points = []
1218
1219    reading_surface = False
1220    for i, line in enumerate(lines):
1221
1222        if len(line.strip()) == 0:    #Ignore blanks
1223            continue
1224
1225        if lines[i].strip().startswith('SURFACE LINE'):
1226            reading_surface = True
1227            continue
1228
1229        if lines[i].strip().startswith('END') and reading_surface:
1230            yield points
1231            reading_surface = False
1232            points = []
1233
1234        if reading_surface:
1235            fields = line.strip().split(',')
1236            easting = float(fields[0])
1237            northing = float(fields[1])
1238            elevation = float(fields[2])
1239            points.append([easting, northing, elevation])
1240
1241
1242
1243
def hecras_cross_sections2pts(basename_in,
                              basename_out=None,
                              verbose=False):
    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)

    Example:


# RAS export file created on Mon 15Aug2005 11:42
# by HEC-RAS Version 3.1.1

BEGIN HEADER:
  UNITS: METRIC
  DTM TYPE: TIN
  DTM: v:\1\cit\perth_topo\river_tin
  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
  MAP PROJECTION: UTM
  PROJECTION ZONE: 50
  DATUM: AGD66
  VERTICAL DATUM:
  NUMBER OF REACHES:  19
  NUMBER OF CROSS-SECTIONS:  14206
END HEADER:


Only the SURFACE LINE data of the following form will be utilised

  CROSS-SECTION:
    STREAM ID:Southern-Wungong
    REACH ID:Southern-Wungong
    STATION:19040.*
    CUT LINE:
      405548.671603161 , 6438142.7594925
      405734.536092045 , 6438326.10404912
      405745.130459356 , 6438331.48627354
      405813.89633823 , 6438368.6272789
    SURFACE LINE:
     405548.67,   6438142.76,   35.37
     405552.24,   6438146.28,   35.41
     405554.78,   6438148.78,   35.44
     405555.80,   6438149.79,   35.44
     405559.37,   6438153.31,   35.45
     405560.88,   6438154.81,   35.44
     405562.93,   6438156.83,   35.42
     405566.50,   6438160.35,   35.38
     405566.99,   6438160.83,   35.37
     ...
   END CROSS-SECTION

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    basename_in: name of input file (without the .sdf extension)
    basename_out: name of output file (without the .pts extension).
                  Defaults to basename_in.
    verbose: if True, print progress information

    The header lines are assumed to appear in exactly the order shown in
    the example above; each is checked with an assert as it is consumed.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get ASCII file
    infile = open(root + '.sdf', 'r')  #Open SDF file for read

    if verbose: print 'Reading DEM from %s' %(root + '.sdf')

    lines = infile.readlines()
    infile.close()

    if verbose: print 'Converting to pts format'

    #Skip leading blank lines and '#' comment lines before the header
    i = 0
    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
        i += 1

    #Parse the fixed-order header; lines we don't use are just skipped
    assert lines[i].strip().upper() == 'BEGIN HEADER:'
    i += 1

    assert lines[i].strip().upper().startswith('UNITS:')
    units = lines[i].strip().split()[1]
    i += 1

    assert lines[i].strip().upper().startswith('DTM TYPE:')
    i += 1

    assert lines[i].strip().upper().startswith('DTM:')
    i += 1

    assert lines[i].strip().upper().startswith('STREAM')
    i += 1

    assert lines[i].strip().upper().startswith('CROSS')
    i += 1

    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
    projection = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
    zone = int(lines[i].strip().split(':')[1])
    i += 1

    assert lines[i].strip().upper().startswith('DATUM:')
    datum = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
    number_of_cross_sections = int(lines[i].strip().split(':')[1])
    i += 1


    #Now read all points from the SURFACE LINE blocks
    #NOTE(review): if the file contains zero cross sections, j is never
    #bound and the message construction below raises NameError.
    points = []
    elevation = []
    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
        for k, entry in enumerate(entries):
            points.append(entry[:2])
            elevation.append(entry[2])


    #Sanity check against the count advertised in the header
    msg = 'Actual #number_of_cross_sections == %d, Reported as %d'\
          %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg

    #Get output file
    if basename_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    #FIXME (DSG-ON): use loadASCII export_points_file
    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable ' +\
                          'storage of spatial point data derived from HEC-RAS'

    #Georeferencing: coordinates are stored as absolute UTM (origin 0,0)
    outfile.zone = zone
    outfile.xllcorner = 0.0
    outfile.yllcorner = 0.0
    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
    outfile.false_northing = 1000000

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    outfile.variables['points'][:] = points
    outfile.variables['elevation'][:] = elevation

    outfile.close()
1418
1419
1420
def sww2dem(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            NODATA_value = -9999,
            easting_min = None,
            easting_max = None,
            northing_min = None,
            northing_max = None,
            expand_search = False, #To avoid intractable situations (This will be fixed when least_squares gets redesigned)
            verbose = False,
            origin = None,
            datum = 'WGS84',
            format = 'ers'):

    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)

    Example (ASC):

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'elevation'.

    if timestep (an index) is given, output quantity at that timestep

    if reduction is given use that to reduce quantity over all timesteps.

    datum

    format can be either 'asc' or 'ers'

    NOTE(review): the timestep argument is accepted but never referenced
    in the body below - time-varying quantities are always collapsed with
    'reduction' (default max). Confirm whether per-timestep export was
    intended.
    """

    import sys
    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
    from Numeric import array2string

    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
    from util import apply_expression_to_dictionary

    msg = 'Format must be either asc or ers'
    assert format.lower() in ['asc', 'ers'], msg


    #Fixed UTM offsets written to the .prj file in the asc branch
    false_easting = 500000
    false_northing = 10000000

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    demfile = basename_out + '.' + format
    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option

    #Read sww file
    if verbose: print 'Reading from %s' %swwfile
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        #Caller-supplied georeference overrides whatever is in the file
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]



    #FIXME: Refactor using code from file_function.statistics
    #Something like print swwstats(swwname)
    if verbose:
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        times = fid.variables['time'][:]
        print '------------------------------------------------'
        print 'Statistics of SWW file:'
        print '  Name: %s' %swwfile
        print '  Reference:'
        print '    Lower left corner: [%f, %f]'\
              %(xllcorner, yllcorner)
        print '    Start time: %f' %fid.starttime[0]
        print '  Extent:'
        print '    x [m] in [%f, %f], len(x) == %d'\
              %(min(x.flat), max(x.flat), len(x.flat))
        print '    y [m] in [%f, %f], len(y) == %d'\
              %(min(y.flat), max(y.flat), len(y.flat))
        print '    t [s] in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))
        print '  Quantities [SI units]:'
        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
            q = fid.variables[name][:].flat
            print '    %s in [%f, %f]' %(name, min(q), max(q))





    #Get quantity and reduce if applicable
    if verbose: print 'Processing quantity %s' %quantity

    #Turn NetCDF objects into Numeric arrays so the expression evaluator
    #can operate on them
    quantity_dict = {}
    for name in fid.variables.keys():
        quantity_dict[name] = fid.variables[name][:]


    #quantity may be a plain name or an expression over stored quantities
    q = apply_expression_to_dictionary(quantity, quantity_dict)



    if len(q.shape) == 2:
        #q has a time component and needs to be reduced along
        #the temporal dimension
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    #Post condition: Now q has dimension: number_of_points
    assert len(q.shape) == 1
    assert q.shape[0] == number_of_points


    if verbose:
        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))


    #Create grid and update xll/yll corner and x,y

    #Relative extent (grid coordinates are relative to xllcorner/yllcorner)
    if easting_min is None:
        xmin = min(x)
    else:
        xmin = easting_min - xllcorner

    if easting_max is None:
        xmax = max(x)
    else:
        xmax = easting_max - xllcorner

    if northing_min is None:
        ymin = min(y)
    else:
        ymin = northing_min - yllcorner

    if northing_max is None:
        ymax = max(y)
    else:
        ymax = northing_max - yllcorner



    if verbose: print 'Creating grid'
    #+1 so both the min and max edge are covered by a grid line
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1


    #New absolute reference and coordinates
    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    #Shift vertex coordinates so they are relative to the new origin
    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2



    grid_points = zeros ( (ncols*nrows, 2), Float )


    #Build the regular grid of sample points.
    #asc rows are generated bottom-up (y increases with i);
    #ers rows are generated top-down as required by that format.
    for i in xrange(nrows):
        if format.lower() == 'asc':
            yg = i*cellsize
        else:
            #this will flip the order of the y values for ers
            yg = (nrows-i)*cellsize

        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

            grid_points[k,0] = xg
            grid_points[k,1] = yg

    #Interpolate
    from least_squares import Interpolation


    #FIXME: This should be done with precrop = True (?), otherwise it'll
    #take forever. With expand_search  set to False, some grid points might
    #miss out.... This will be addressed though Duncan's refactoring of least_squares

    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
                           precrop = False,
                           expand_search = expand_search,
                           verbose = verbose)



    #Interpolate using quantity values
    if verbose: print 'Interpolating'
    grid_values = interp.interpolate(q).flat


    if verbose:
        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
                                                      max(grid_values))

    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
    P = interp.mesh.get_boundary_polygon()
    outside_indices = outside_polygon(grid_points, P, closed=True)

    for i in outside_indices:
        grid_values[i] = NODATA_value




    if format.lower() == 'ers':
        # setup ERS header information
        grid_values = reshape(grid_values,(nrows, ncols))
        header = {}
        header['datum'] = '"' + datum + '"'
        # FIXME The use of hardwired UTM and zone number needs to be made optional
        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
        header['projection'] = '"UTM-' + str(zone) + '"'
        header['coordinatetype'] = 'EN'
        if header['coordinatetype'] == 'LL':
            header['longitude'] = str(newxllcorner)
            header['latitude'] = str(newyllcorner)
        elif header['coordinatetype'] == 'EN':
            header['eastings'] = str(newxllcorner)
            header['northings'] = str(newyllcorner)
        header['nullcellvalue'] = str(NODATA_value)
        header['xdimension'] = str(cellsize)
        header['ydimension'] = str(cellsize)
        header['value'] = '"' + quantity + '"'
        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test


        #Write
        if verbose: print 'Writing %s' %demfile
        import ermapper_grids
        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)

        fid.close()
    else:
        #Write to Ascii format

        #Write prj file (fixes UTM zone, datum, false northings/eastings)
        prjfile = basename_out + '.prj'

        if verbose: print 'Writing %s' %prjfile
        prjid = open(prjfile, 'w')
        prjid.write('Projection    %s\n' %'UTM')
        prjid.write('Zone          %d\n' %zone)
        prjid.write('Datum         %s\n' %datum)
        prjid.write('Zunits        NO\n')
        prjid.write('Units         METERS\n')
        prjid.write('Spheroid      %s\n' %datum)
        prjid.write('Xshift        %d\n' %false_easting)
        prjid.write('Yshift        %d\n' %false_northing)
        prjid.write('Parameters\n')
        prjid.close()



        if verbose: print 'Writing %s' %demfile

        ascid = open(demfile, 'w')

        ascid.write('ncols         %d\n' %ncols)
        ascid.write('nrows         %d\n' %nrows)
        ascid.write('xllcorner     %d\n' %newxllcorner)
        ascid.write('yllcorner     %d\n' %newyllcorner)
        ascid.write('cellsize      %f\n' %cellsize)
        ascid.write('NODATA_value  %d\n' %NODATA_value)


        #Get bounding polygon from mesh
        #P = interp.mesh.get_boundary_polygon()
        #inside_indices = inside_polygon(grid_points, P)

        #The grid was generated bottom-up, but asc files list the top row
        #first - hence rows are written in reverse order here.
        for i in range(nrows):
            if verbose and i%((nrows+10)/10)==0:
                print 'Doing row %d of %d' %(i, nrows)

            base_index = (nrows-i-1)*ncols

            slice = grid_values[base_index:base_index+ncols]
            #array2string with s[1:-1] strips the enclosing brackets
            s = array2string(slice, max_line_width=sys.maxint)
            ascid.write(s[1:-1] + '\n')


            #print
            #for j in range(ncols):
            #    index = base_index+j#
            #    print grid_values[index],
            #    ascid.write('%f ' %grid_values[index])
            #ascid.write('\n')

        #Close
        ascid.close()
        fid.close()
1778
1779#Backwards compatibility
1780def sww2asc(basename_in, basename_out = None,
1781            quantity = None,
1782            timestep = None,
1783            reduction = None,
1784            cellsize = 10,
1785            verbose = False,
1786            origin = None):
1787    print 'sww2asc will soon be obsoleted - please use sww2dem'
1788    sww2dem(basename_in,
1789            basename_out = basename_out,
1790            quantity = quantity,
1791            timestep = timestep,
1792            reduction = reduction,
1793            cellsize = cellsize,
1794            verbose = verbose,
1795            origin = origin,
1796            datum = 'WGS84',
1797            format = 'asc')
1798
1799def sww2ers(basename_in, basename_out = None,
1800            quantity = None,
1801            timestep = None,
1802            reduction = None,
1803            cellsize = 10,
1804            verbose = False,
1805            origin = None,
1806            datum = 'WGS84'):
1807    print 'sww2ers will soon be obsoleted - please use sww2dem'
1808    sww2dem(basename_in,
1809            basename_out = basename_out,
1810            quantity = quantity,
1811            timestep = timestep,
1812            reduction = reduction,
1813            cellsize = cellsize,
1814            verbose = verbose,
1815            origin = origin,
1816            datum = datum,
1817            format = 'ers')
1818################################# END COMPATIBILITY ##############
1819
1820
1821
1822
def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
                                  verbose=False):
    """Read Digitial Elevation model from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert basename_in + '.asc' to NetCDF format (.dem)
    mimicking the ASCII format closely.


    An accompanying file with same basename_in but extension .prj must exist
    and is used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters

    basename_in: name of input files (without .asc/.prj extensions)
    basename_out: name of output file (without the .dem extension).
                  Defaults to basename_in.
    verbose: if True, print progress information

    The .prj lines are assumed to appear in exactly the order shown
    above; each is checked with an assert as it is read.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, array

    #root, ext = os.path.splitext(basename_in)
    root = basename_in

    ###########################################
    # Read Meta data
    # (% binds tighter than +, so '.prj' is appended after formatting -
    # the message printed is still correct)
    if verbose: print 'Reading METADATA from %s' %root + '.prj'
    metadatafile = open(root + '.prj')
    metalines = metadatafile.readlines()
    metadatafile.close()

    L = metalines[0].strip().split()
    assert L[0].strip().lower() == 'projection'
    projection = L[1].strip()                   #TEXT

    L = metalines[1].strip().split()
    assert L[0].strip().lower() == 'zone'
    zone = int(L[1].strip())

    L = metalines[2].strip().split()
    assert L[0].strip().lower() == 'datum'
    datum = L[1].strip()                        #TEXT

    L = metalines[3].strip().split()
    assert L[0].strip().lower() == 'zunits'     #IGNORE
    zunits = L[1].strip()                       #TEXT

    L = metalines[4].strip().split()
    assert L[0].strip().lower() == 'units'
    units = L[1].strip()                        #TEXT

    L = metalines[5].strip().split()
    assert L[0].strip().lower() == 'spheroid'   #IGNORE
    spheroid = L[1].strip()                     #TEXT

    L = metalines[6].strip().split()
    assert L[0].strip().lower() == 'xshift'
    false_easting = float(L[1].strip())

    L = metalines[7].strip().split()
    assert L[0].strip().lower() == 'yshift'
    false_northing = float(L[1].strip())

    #print false_easting, false_northing, zone, datum


    ###########################################
    #Read DEM data

    datafile = open(basename_in + '.asc')

    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), ' lines'

    #Six header lines precede the elevation rows
    ncols = int(lines[0].split()[1].strip())
    nrows = int(lines[1].split()[1].strip())
    xllcorner = float(lines[2].split()[1].strip())
    yllcorner = float(lines[3].split()[1].strip())
    cellsize = float(lines[4].split()[1].strip())
    NODATA_value = int(lines[5].split()[1].strip())

    assert len(lines) == nrows + 6


    ##########################################


    if basename_out == None:
        netcdfname = root + '.dem'
    else:
        netcdfname = basename_out + '.dem'

    if verbose: print 'Store to NetCDF file %s' %netcdfname
    # NetCDF file definition
    fid = NetCDFFile(netcdfname, 'w')

    #Create new file
    fid.institution = 'Geoscience Australia'
    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
                      'of spatial point data'

    #Copy grid meta data verbatim into the NetCDF attributes
    fid.ncols = ncols
    fid.nrows = nrows
    fid.xllcorner = xllcorner
    fid.yllcorner = yllcorner
    fid.cellsize = cellsize
    fid.NODATA_value = NODATA_value

    fid.zone = zone
    fid.false_easting = false_easting
    fid.false_northing = false_northing
    fid.projection = projection
    fid.datum = datum
    fid.units = units


    # dimension definitions
    fid.createDimension('number_of_rows', nrows)
    fid.createDimension('number_of_columns', ncols)

    # variable definitions
    fid.createVariable('elevation', Float, ('number_of_rows',
                                            'number_of_columns'))

    # Get handles to the variables
    elevation = fid.variables['elevation']

    #Store data, one row at a time, in the same row order as the .asc file
    n = len(lines[6:])
    for i, line in enumerate(lines[6:]):
        fields = line.split()
        if verbose and i%((n+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        elevation[i, :] = array([float(x) for x in fields])

    fid.close()
1980
1981
1982
1983
1984
1985def ferret2sww(basename_in, basename_out = None,
1986               verbose = False,
1987               minlat = None, maxlat = None,
1988               minlon = None, maxlon = None,
1989               mint = None, maxt = None, mean_stage = 0,
1990               origin = None, zscale = 1,
1991               fail_on_NaN = True,
1992               NaN_filler = 0,
1993               elevation = None,
1994               inverted_bathymetry = False
1995               ): #FIXME: Bathymetry should be obtained
1996                                  #from MOST somehow.
1997                                  #Alternatively from elsewhere
1998                                  #or, as a last resort,
1999                                  #specified here.
2000                                  #The value of -100 will work
2001                                  #for the Wollongong tsunami
2002                                  #scenario but is very hacky
2003    """Convert 'Ferret' NetCDF format for wave propagation to
2004    sww format native to pyvolution.
2005
2006    Specify only basename_in and read files of the form
2007    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2008    relative height, x-velocity and y-velocity, respectively.
2009
2010    Also convert latitude and longitude to UTM. All coordinates are
2011    assumed to be given in the GDA94 datum.
2012
2013    min's and max's: If omitted - full extend is used.
2014    To include a value min may equal it, while max must exceed it.
2015    Lat and lon are assuemd to be in decimal degrees
2016
2017    origin is a 3-tuple with geo referenced
2018    UTM coordinates (zone, easting, northing)
2019
2020    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2021    which means that longitude is the fastest
2022    varying dimension (row major order, so to speak)
2023
2024    ferret2sww uses grid points as vertices in a triangular grid
2025    counting vertices from lower left corner upwards, then right
2026    """
2027
2028    import os
2029    from Scientific.IO.NetCDF import NetCDFFile
2030    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2031    from Numeric import allclose, around
2032
2033    precision = Float
2034
2035
2036    #Get NetCDF data
2037    if verbose: print 'Reading files %s_*.nc' %basename_in
2038    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2039    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2040    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2041    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
2042
2043    if basename_out is None:
2044        swwname = basename_in + '.sww'
2045    else:
2046        swwname = basename_out + '.sww'
2047
2048    times = file_h.variables['TIME']
2049    latitudes = file_h.variables['LAT']
2050    longitudes = file_h.variables['LON']
2051
2052
2053
2054    #Precision used by most for lat/lon is 4 or 5 decimals
2055    e_lat = around(file_e.variables['LAT'][:], 5)
2056    e_lon = around(file_e.variables['LON'][:], 5)
2057
2058    #Check that files are compatible
2059    assert allclose(latitudes, file_u.variables['LAT'])
2060    assert allclose(latitudes, file_v.variables['LAT'])
2061    assert allclose(latitudes, e_lat)
2062
2063    assert allclose(longitudes, file_u.variables['LON'])
2064    assert allclose(longitudes, file_v.variables['LON'])
2065    assert allclose(longitudes, e_lon)
2066
2067
2068
2069    if mint == None:
2070        jmin = 0
2071    else:
2072        jmin = searchsorted(times, mint)
2073
2074    if maxt == None:
2075        jmax=len(times)
2076    else:
2077        jmax = searchsorted(times, maxt)
2078
2079    if minlat == None:
2080        kmin=0
2081    else:
2082        kmin = searchsorted(latitudes, minlat)
2083
2084    if maxlat == None:
2085        kmax = len(latitudes)
2086    else:
2087        kmax = searchsorted(latitudes, maxlat)
2088
2089    if minlon == None:
2090        lmin=0
2091    else:
2092        lmin = searchsorted(longitudes, minlon)
2093
2094    if maxlon == None:
2095        lmax = len(longitudes)
2096    else:
2097        lmax = searchsorted(longitudes, maxlon)
2098
2099
2100
2101    times = times[jmin:jmax]
2102    latitudes = latitudes[kmin:kmax]
2103    longitudes = longitudes[lmin:lmax]
2104
2105
2106    if verbose: print 'cropping'
2107    zname = 'ELEVATION'
2108
2109
2110    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2111    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2112    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2113    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2114
2115    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2116    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2117    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2118    #        from Numeric import asarray
2119    #        elevations=elevations.tolist()
2120    #        elevations.reverse()
2121    #        elevations=asarray(elevations)
2122    #    else:
2123    #        from Numeric import asarray
2124    #        elevations=elevations.tolist()
2125    #        elevations.reverse()
2126    #        elevations=asarray(elevations)
2127    #        'print hmmm'
2128
2129
2130
2131    #Get missing values
2132    nan_ha = file_h.variables['HA'].missing_value[0]
2133    nan_ua = file_u.variables['UA'].missing_value[0]
2134    nan_va = file_v.variables['VA'].missing_value[0]
2135    if hasattr(file_e.variables[zname],'missing_value'):
2136        nan_e  = file_e.variables[zname].missing_value[0]
2137    else:
2138        nan_e = None
2139
2140    #Cleanup
2141    from Numeric import sometrue
2142
2143    missing = (amplitudes == nan_ha)
2144    if sometrue (missing):
2145        if fail_on_NaN:
2146            msg = 'NetCDFFile %s contains missing values'\
2147                  %(basename_in+'_ha.nc')
2148            raise msg
2149        else:
2150            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2151
2152    missing = (uspeed == nan_ua)
2153    if sometrue (missing):
2154        if fail_on_NaN:
2155            msg = 'NetCDFFile %s contains missing values'\
2156                  %(basename_in+'_ua.nc')
2157            raise msg
2158        else:
2159            uspeed = uspeed*(missing==0) + missing*NaN_filler
2160
2161    missing = (vspeed == nan_va)
2162    if sometrue (missing):
2163        if fail_on_NaN:
2164            msg = 'NetCDFFile %s contains missing values'\
2165                  %(basename_in+'_va.nc')
2166            raise msg
2167        else:
2168            vspeed = vspeed*(missing==0) + missing*NaN_filler
2169
2170
2171    missing = (elevations == nan_e)
2172    if sometrue (missing):
2173        if fail_on_NaN:
2174            msg = 'NetCDFFile %s contains missing values'\
2175                  %(basename_in+'_e.nc')
2176            raise msg
2177        else:
2178            elevations = elevations*(missing==0) + missing*NaN_filler
2179
2180    #######
2181
2182
2183
2184    number_of_times = times.shape[0]
2185    number_of_latitudes = latitudes.shape[0]
2186    number_of_longitudes = longitudes.shape[0]
2187
2188    assert amplitudes.shape[0] == number_of_times
2189    assert amplitudes.shape[1] == number_of_latitudes
2190    assert amplitudes.shape[2] == number_of_longitudes
2191
2192    if verbose:
2193        print '------------------------------------------------'
2194        print 'Statistics:'
2195        print '  Extent (lat/lon):'
2196        print '    lat in [%f, %f], len(lat) == %d'\
2197              %(min(latitudes.flat), max(latitudes.flat),
2198                len(latitudes.flat))
2199        print '    lon in [%f, %f], len(lon) == %d'\
2200              %(min(longitudes.flat), max(longitudes.flat),
2201                len(longitudes.flat))
2202        print '    t in [%f, %f], len(t) == %d'\
2203              %(min(times.flat), max(times.flat), len(times.flat))
2204
2205        q = amplitudes.flat
2206        name = 'Amplitudes (ha) [cm]'
2207        print %s in [%f, %f]' %(name, min(q), max(q))
2208
2209        q = uspeed.flat
2210        name = 'Speeds (ua) [cm/s]'
2211        print %s in [%f, %f]' %(name, min(q), max(q))
2212
2213        q = vspeed.flat
2214        name = 'Speeds (va) [cm/s]'
2215        print %s in [%f, %f]' %(name, min(q), max(q))
2216
2217        q = elevations.flat
2218        name = 'Elevations (e) [m]'
2219        print %s in [%f, %f]' %(name, min(q), max(q))
2220
2221
2222    #print number_of_latitudes, number_of_longitudes
2223    number_of_points = number_of_latitudes*number_of_longitudes
2224    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2225
2226
2227    file_h.close()
2228    file_u.close()
2229    file_v.close()
2230    file_e.close()
2231
2232
2233    # NetCDF file definition
2234    outfile = NetCDFFile(swwname, 'w')
2235
2236    #Create new file
2237    outfile.institution = 'Geoscience Australia'
2238    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2239                          %(basename_in + '_ha.nc',
2240                            basename_in + '_ua.nc',
2241                            basename_in + '_va.nc',
2242                            basename_in + '_e.nc')
2243
2244
2245    #For sww compatibility
2246    outfile.smoothing = 'Yes'
2247    outfile.order = 1
2248
2249    #Start time in seconds since the epoch (midnight 1/1/1970)
2250    outfile.starttime = starttime = times[0]
2251    times = times - starttime  #Store relative times
2252
2253    # dimension definitions
2254    outfile.createDimension('number_of_volumes', number_of_volumes)
2255
2256    outfile.createDimension('number_of_vertices', 3)
2257    outfile.createDimension('number_of_points', number_of_points)
2258
2259
2260    #outfile.createDimension('number_of_timesteps', len(times))
2261    outfile.createDimension('number_of_timesteps', len(times))
2262
2263    # variable definitions
2264    outfile.createVariable('x', precision, ('number_of_points',))
2265    outfile.createVariable('y', precision, ('number_of_points',))
2266    outfile.createVariable('elevation', precision, ('number_of_points',))
2267
2268    #FIXME: Backwards compatibility
2269    outfile.createVariable('z', precision, ('number_of_points',))
2270    #################################
2271
2272    outfile.createVariable('volumes', Int, ('number_of_volumes',
2273                                            'number_of_vertices'))
2274
2275    outfile.createVariable('time', precision,
2276                           ('number_of_timesteps',))
2277
2278    outfile.createVariable('stage', precision,
2279                           ('number_of_timesteps',
2280                            'number_of_points'))
2281
2282    outfile.createVariable('xmomentum', precision,
2283                           ('number_of_timesteps',
2284                            'number_of_points'))
2285
2286    outfile.createVariable('ymomentum', precision,
2287                           ('number_of_timesteps',
2288                            'number_of_points'))
2289
2290
2291    #Store
2292    from coordinate_transforms.redfearn import redfearn
2293    x = zeros(number_of_points, Float)  #Easting
2294    y = zeros(number_of_points, Float)  #Northing
2295
2296
2297    if verbose: print 'Making triangular grid'
2298    #Check zone boundaries
2299    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2300
2301    vertices = {}
2302    i = 0
2303    for k, lat in enumerate(latitudes):       #Y direction
2304        for l, lon in enumerate(longitudes):  #X direction
2305
2306            vertices[l,k] = i
2307
2308            zone, easting, northing = redfearn(lat,lon)
2309
2310            msg = 'Zone boundary crossed at longitude =', lon
2311            #assert zone == refzone, msg
2312            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2313            x[i] = easting
2314            y[i] = northing
2315            i += 1
2316
2317
2318    #Construct 2 triangles per 'rectangular' element
2319    volumes = []
2320    for l in range(number_of_longitudes-1):    #X direction
2321        for k in range(number_of_latitudes-1): #Y direction
2322            v1 = vertices[l,k+1]
2323            v2 = vertices[l,k]
2324            v3 = vertices[l+1,k+1]
2325            v4 = vertices[l+1,k]
2326
2327            volumes.append([v1,v2,v3]) #Upper element
2328            volumes.append([v4,v3,v2]) #Lower element
2329
2330    volumes = array(volumes)
2331
2332    if origin == None:
2333        zone = refzone
2334        xllcorner = min(x)
2335        yllcorner = min(y)
2336    else:
2337        zone = origin[0]
2338        xllcorner = origin[1]
2339        yllcorner = origin[2]
2340
2341
2342    outfile.xllcorner = xllcorner
2343    outfile.yllcorner = yllcorner
2344    outfile.zone = zone
2345
2346
2347    if elevation is not None:
2348        z = elevation
2349    else:
2350        if inverted_bathymetry:
2351            z = -1*elevations
2352        else:
2353            z = elevations
2354        #FIXME: z should be obtained from MOST and passed in here
2355
2356    from Numeric import resize
2357    z = resize(z,outfile.variables['z'][:].shape)
2358    outfile.variables['x'][:] = x - xllcorner
2359    outfile.variables['y'][:] = y - yllcorner
2360    outfile.variables['z'][:] = z             #FIXME HACK
2361    outfile.variables['elevation'][:] = z
2362    outfile.variables['time'][:] = times   #Store time relative
2363    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2364
2365
2366
2367    #Time stepping
2368    stage = outfile.variables['stage']
2369    xmomentum = outfile.variables['xmomentum']
2370    ymomentum = outfile.variables['ymomentum']
2371
2372    if verbose: print 'Converting quantities'
2373    n = len(times)
2374    for j in range(n):
2375        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2376        i = 0
2377        for k in range(number_of_latitudes):      #Y direction
2378            for l in range(number_of_longitudes): #X direction
2379                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2380                stage[j,i] = w
2381                h = w - z[i]
2382                xmomentum[j,i] = uspeed[j,k,l]/100*h
2383                ymomentum[j,i] = vspeed[j,k,l]/100*h
2384                i += 1
2385
2386    #outfile.close()
2387
2388    #FIXME: Refactor using code from file_function.statistics
2389    #Something like print swwstats(swwname)
2390    if verbose:
2391        x = outfile.variables['x'][:]
2392        y = outfile.variables['y'][:]
2393        times = outfile.variables['time'][:]
2394        print '------------------------------------------------'
2395        print 'Statistics of output file:'
2396        print '  Name: %s' %swwname
2397        print '  Reference:'
2398        print '    Lower left corner: [%f, %f]'\
2399              %(xllcorner, yllcorner)
2400        print '    Start time: %f' %starttime
2401        print '  Extent:'
2402        print '    x [m] in [%f, %f], len(x) == %d'\
2403              %(min(x.flat), max(x.flat), len(x.flat))
2404        print '    y [m] in [%f, %f], len(y) == %d'\
2405              %(min(y.flat), max(y.flat), len(y.flat))
2406        print '    t [s] in [%f, %f], len(t) == %d'\
2407              %(min(times), max(times), len(times))
2408        print '  Quantities [SI units]:'
2409        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2410            q = outfile.variables[name][:].flat
2411            print '    %s in [%f, %f]' %(name, min(q), max(q))
2412
2413
2414
2415    outfile.close()
2416
2417
2418
2419
2420
def timefile2netcdf(filename, quantity_names = None):
    """Template for converting typical text files with time series to
    NetCDF tms file.


    The file format is assumed to be either two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname with extenisons .txt and .sww

    quantity_names: optional sequence of names for the value columns;
    columns without a supplied name are called 'Attribute<i>'.
    """

    import time, calendar
    from Numeric import array, zeros, Float, alltrue
    from config import time_format
    from util import ensure_numeric


    #Read first line to establish start time and number of attributes
    fid = open(filename + '.txt')
    line = fid.readline()
    fid.close()

    fields = line.split(',')
    #NOTE: the %s placeholder was previously never filled in
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg

    try:
        starttime = calendar.timegm(time.strptime(fields[0], time_format))
    except ValueError:
        msg = 'First field in file %s must be' %filename
        msg += ' date-time with format %s.\n' %time_format
        msg += 'I got %s instead.' %fields[0]
        raise msg


    #Split values of the first line to establish dimensionality
    values = [float(value) for value in fields[1].split()]

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg



    #Read times proper
    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    N = len(lines)
    d = len(q)

    T = zeros(N, Float)       #Time
    Q = zeros((N, d), Float)  #Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        realtime = calendar.timegm(time.strptime(fields[0], time_format))

        T[i] = realtime - starttime  #Seconds relative to start

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonuosly ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg


    #Create NetCDF file
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(filename + '.tms', 'w')


    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'


    #Reference point
    #Start time in seconds since the epoch (midnight 1/1/1970)
    #FIXME: Use Georef
    fid.starttime = starttime


    # dimension definitions
    #fid.createDimension('number_of_volumes', self.number_of_volumes)
    #fid.createDimension('number_of_vertices', 3)


    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', Float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        #Use the caller-supplied name when available
        #(previously a bare except: swallowed all errors here)
        if quantity_names is not None and i < len(quantity_names):
            name = quantity_names[i]
        else:
            name = 'Attribute%d'%i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()
2541
2542
def extent_sww(file_name):
    """Determine the spatial extent and stage range of an sww file.

    Input;
    file_name - the sww file

    Output;
    A list [xmin, xmax, ymin, ymax, stage_min, stage_max] where
    x and y are vertex coordinates and stage is taken over all
    timesteps and vertices.
    """

    from Scientific.IO.NetCDF import NetCDFFile

    #Open the NetCDF file read-only and pull out the needed variables
    fid = NetCDFFile(file_name, 'r')

    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stages = fid.variables['stage'][:]

    fid.close()

    flat_stage = stages.flat
    return [min(xs), max(xs),
            min(ys), max(ys),
            min(flat_stage), max(flat_stage)]
2577
2578
def sww2domain(filename,boundary=None,t=None,\
               fail_if_NaN=True,NaN_filler=0\
               ,verbose = False,very_verbose = False):
    """Reconstruct a shallow_water Domain from an sww file at model time t.

    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    Conserved quantities (those with a time dimension) are linearly
    interpolated to time t; other quantities are taken as stored.
    If fail_if_NaN is False, entries equal to the file's missing_value
    are replaced with NaN_filler instead of raising.

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatable with the coordinates, and will
    give a different final boundary, or crash.
    """
    NaN=9.969209968386869e+036
    #initialise NaN.
    #NOTE(review): this default is rebound below from each variable's
    #missing_value attribute when present; a prior variable's value leaks
    #into the next when the attribute is absent - confirm intended.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
    time = fid.variables['time']       #Timesteps
    if t is None:
        t = time[-1]                   #Default to last stored timestep
    time_interp = get_time_interp(time,t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]             #x-coordinates of vertices
    y = fid.variables['y'][:]             #y-coordinates of vertices
    elevation = fid.variables['elevation']     #Elevation
    stage = fid.variables['stage']     #Water level
    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    coordinates=transpose(asarray([x.tolist(),y.tolist()]))

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    if verbose: print '    getting quantities'
    #Variables with a time dimension are conserved quantities and are
    #interpolated to t; everything else is a static quantity
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity]=\
                  interpolated_quantity(fid.variables[quantity][:],time_interp)
        else: other_quantities.append(quantity)

    #Remove bookkeeping variables that are not domain quantities
    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    #   From shallow_water.Domain:
    coordinates=coordinates.tolist()
    volumes=volumes.tolist()
    #FIXME:should this be in mesh?(peter row)
    #Smoothed (averaged) files store one value per unique vertex, so
    #duplicate coordinates must be weeded out for non-smoothed files
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)


    domain = Domain(coordinates, volumes, boundary)

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    #Model time t becomes the new domain start time
    domain.starttime=float(starttime)+float(t)
    domain.time=0.0

    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                #'<>' is the Python 2 spelling of '!='
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Expand per-vertex values back to one value per triangle vertex
            X = resize(X,(len(X)/3,3))
        domain.set_quantity(quantity,X)
    #
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            X = resize(X,(X.shape[0]/3,3))
        domain.set_quantity(quantity,X)
    fid.close()
    return domain
2721
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a stored quantity in time.

    time_interp is a pair (index, ratio) as produced by get_time_interp.
    A ratio of zero means the value at index is returned unchanged;
    otherwise values at index and index+1 are blended linearly.
    """
    index, ratio = time_interp

    if ratio <= 0:
        #Exactly on a stored timestep
        return saved_quantity[index]

    #Linear blend between the two bracketing timesteps
    lower = saved_quantity[index]
    upper = saved_quantity[index + 1]
    return (1 - ratio)*lower + ratio*upper
2733
def get_time_interp(time, t=None):
    """Locate model time t within the stored time vector.

    Returns (index, ratio) such that t lies between time[index] and
    time[index+1] with the given interpolation ratio. With t omitted
    the last timestep is selected ((-1, 0.)). Raises if t falls
    outside the stored interval.
    (Borrowed from previous pyvolution code.)
    """
    if t is None:
        #Default: last stored timestep, no interpolation required
        return (-1, 0.)

    target = t
    msg = 'Time interval derived from file %s [%s:%s]'\
        %('FIXMEfilename', time[0], time[-1])
    msg += ' does not match model time: %s' %target
    if target < time[0]: raise msg
    if target > time[-1]: raise msg

    #Walk forward past target, then back to the bracketing index
    index = 0
    while target > time[index]:
        index += 1
    while target < time[index]:
        index -= 1

    if target == time[index]:
        #Exact hit - also covers target == time[-1] (last time)
        ratio = 0
    else:
        #target is now between index and index+1
        ratio = (target - time[index])/(time[index+1] - time[index])

    return (index, ratio)
2760
2761
def weed(coordinates, volumes, boundary = None):
    """Remove duplicate coordinates and remap volumes/boundary accordingly.

    coordinates: list (or Numeric array) of [x, y] points, possibly with
                 duplicates.
    volumes: list (or Numeric array) of vertex-index triples; remapped
             IN PLACE when given as a list.
    boundary: optional dict mapping (index0, index1) segments to labels.

    Returns (coordinates, volumes, boundary) with each unique point
    appearing once and all indices rewritten to the weeded numbering.
    """
    #NOTE: the original checks read type(x)=='array', comparing a type
    #object to a string - always False, so arrays were never converted.
    if type(coordinates).__name__ == 'array':
        coordinates = coordinates.tolist()
    if type(volumes).__name__ == 'array':
        volumes = volumes.tolist()

    #Map each index to its point and each unique point to its first index
    unique = False   #True if any duplicate point was found
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point in point_dict:
            unique = True
            same_point[i] = point
            #to change all point i references to point j
        else:
            point_dict[point] = i
            same_point[i] = point

    #Rebuild the coordinate list with one entry per unique point and
    #renumber point_dict to the new compact indices
    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    #Rewrite volume indices in place (negative indices left untouched)
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    #Rewrite boundary segments; when both orientations collapse onto an
    #existing segment, the first label encountered wins
    new_boundary = {}
    if not boundary is None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the bounday attributes be concaterated
            #('exterior, pond') or replaced ('pond')(peter row)

            if (point0, point1) in new_boundary:
                new_boundary[(point0,point1)] = new_boundary[(point0,point1)]#\
                                              #+','+label

            elif (point1, point0) in new_boundary:
                new_boundary[(point1,point0)] = new_boundary[(point1,point0)]#\
                                              #+','+label
            else:
                new_boundary[(point0,point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
2817
2818
2819
2820
2821
2822
2823
2824
2825
2826
2827###############################################
2828#OBSOLETE STUFF
2829#Native checkpoint format.
2830#Information needed to recreate a state is preserved
2831#FIXME: Rethink and maybe use netcdf format
def cpt_variable_writer(filename, t, v0, v1, v2):
    """Store all conserved quantities to a checkpoint (.cpt) file.

    Each output row holds the N values of v0, then v1, then v2 for one
    element, written as '%.16e ' fields and terminated by a newline.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        #Write the three quantity rows back to back on one line
        for quantity in (v0, v1, v2):
            for j in range(N):
                fid.write('%.16e ' %quantity[i,j])
        fid.write('\n')
    fid.close()
2852
2853
def cpt_variable_reader(filename, t, v0, v1, v2):
    """Read all conserved quantities back from a checkpoint (.cpt) file.

    Counterpart of cpt_variable_writer: each line carries N values for
    v0, then N for v1, then N for v2. Arrays v0, v1, v2 are filled
    IN PLACE.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Reading from %s' %FN

    fid = open(FN)


    for i in range(M):
        values = fid.readline().split() #Get one line

        for j in range(N):
            v0[i,j] = float(values[j])
            #NOTE: offsets were hard-coded as 3+j and 6+j, silently
            #assuming N == 3; generalised to match the writer's layout
            v1[i,j] = float(values[N+j])
            v2[i,j] = float(values[2*N+j])

    fid.close()
2875
def cpt_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z,z,z coordinates of triangles constituting the bed
    elevation.
    FIXME: Not in use pt

    WARNING: this function is effectively disabled - the debug print and
    sys.exit() below terminate the interpreter before any file is
    written. Everything after the exit is unreachable dead code kept for
    reference.
    """

    M, N = v0.shape


    #Debug residue: dumps X0 and kills the process
    print X0
    import sys; sys.exit()
    #--- unreachable from here on ---
    FN = create_filename(filename, 'cpt', M)
    print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        for j in range(2):
            fid.write('%.16e ' %X0[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v0[i,j])       #z,z,z,

        for j in range(2):
            fid.write('%.16e ' %X1[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v1[i,j])

        for j in range(2):
            fid.write('%.16e ' %X2[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v2[i,j])

        fid.write('\n')
    fid.close()
2909
2910
2911
2912#Function for storing out to e.g. visualisation
2913#FIXME: Do we want this?
2914#FIXME: Not done yet for this version
def dat_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z coordinates of triangles constituting the bed elevation.

    One line per triangle: x, y and the first quantity value for each of
    the three vertices, formatted as '%f '.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        # Emit x, y, z for each of the three vertices in turn
        for coords, values in ((X0, v0), (X1, v1), (X2, v2)):
            fid.write('%f ' % coords[i,0])   #x
            fid.write('%f ' % coords[i,1])   #y
            fid.write('%f ' % values[i,0])   #z

        fid.write('\n')
    fid.close()
2940
2941
2942
def dat_variable_writer(filename, t, v0, v1, v2):
    """Store water height to file

    One line per triangle holding the first value of v0, v1 and v2,
    each formatted as '%.4f '.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M, t)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        # Single formatted write per row instead of three separate writes
        fid.write('%.4f %.4f %.4f \n' % (v0[i,0], v1[i,0], v2[i,0]))
    fid.close()
2960
2961
def read_sww(filename):
    """Read sww Net CDF file containing Shallow Water Wave simulation

    The integer array volumes is of shape Nx3 where N is the number of
    triangles in the mesh.

    Each entry in volumes is an index into the x,y arrays (the location).

    Quantities stage, elevation, xmomentum and ymomentum are all in arrays of dimensions
    number_of_timesteps, number_of_points.

    The momentum is not always stored.

    NOTE(review): this function binds the NetCDF variables to locals but
    returns nothing and never closes the file -- it appears incomplete or
    is a stub for interactive use; confirm intended behaviour before
    relying on it.
    """
    from Scientific.IO.NetCDF import NetCDFFile
    print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
#latitude, longitude
    # Get the variables as Numeric arrays
    x = fid.variables['x']             #x-coordinates of vertices
    y = fid.variables['y']             #y-coordinates of vertices
    z = fid.variables['elevation']     #Elevation
    time = fid.variables['time']       #Timesteps
    stage = fid.variables['stage']     #Water level
    #xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    #ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    volumes = fid.variables['volumes'] #Connectivity
2991
2992def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
2993                 verbose=False):
2994    """Read Digitial Elevation model from the following NetCDF format (.dem)
2995
2996    Example:
2997
2998    ncols         3121
2999    nrows         1800
3000    xllcorner     722000
3001    yllcorner     5893000
3002    cellsize      25
3003    NODATA_value  -9999
3004    138.3698 137.4194 136.5062 135.5558 ..........
3005
3006    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3007    """
3008
3009    import os
3010    from Scientific.IO.NetCDF import NetCDFFile
3011    from Numeric import Float, zeros, sum, reshape, equal
3012
3013    root = basename_in
3014    inname = root + '.dem'
3015
3016    #Open existing netcdf file to read
3017    infile = NetCDFFile(inname, 'r')
3018    if verbose: print 'Reading DEM from %s' %inname
3019
3020    #Read metadata
3021    ncols = infile.ncols[0]
3022    nrows = infile.nrows[0]
3023    xllcorner = infile.xllcorner[0]
3024    yllcorner = infile.yllcorner[0]
3025    cellsize = infile.cellsize[0]
3026    NODATA_value = infile.NODATA_value[0]
3027    zone = infile.zone[0]
3028    false_easting = infile.false_easting[0]
3029    false_northing = infile.false_northing[0]
3030    projection = infile.projection
3031    datum = infile.datum
3032    units = infile.units
3033
3034    dem_elevation = infile.variables['elevation']
3035
3036    #Get output file name
3037    if basename_out == None:
3038        outname = root + '_' + repr(cellsize_new) + '.dem'
3039    else:
3040        outname = basename_out + '.dem'
3041
3042    if verbose: print 'Write decimated NetCDF file to %s' %outname
3043
3044    #Determine some dimensions for decimated grid
3045    (nrows_stencil, ncols_stencil) = stencil.shape
3046    x_offset = ncols_stencil / 2
3047    y_offset = nrows_stencil / 2
3048    cellsize_ratio = int(cellsize_new / cellsize)
3049    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3050    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3051
3052    #Open netcdf file for output
3053    outfile = NetCDFFile(outname, 'w')
3054
3055    #Create new file
3056    outfile.institution = 'Geoscience Australia'
3057    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3058                           'of spatial point data'
3059    #Georeferencing
3060    outfile.zone = zone
3061    outfile.projection = projection
3062    outfile.datum = datum
3063    outfile.units = units
3064
3065    outfile.cellsize = cellsize_new
3066    outfile.NODATA_value = NODATA_value
3067    outfile.false_easting = false_easting
3068    outfile.false_northing = false_northing
3069
3070    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3071    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3072    outfile.ncols = ncols_new
3073    outfile.nrows = nrows_new
3074
3075    # dimension definition
3076    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3077
3078    # variable definition
3079    outfile.createVariable('elevation', Float, ('number_of_points',))
3080
3081    # Get handle to the variable
3082    elevation = outfile.variables['elevation']
3083
3084    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3085
3086    #Store data
3087    global_index = 0
3088    for i in range(nrows_new):
3089        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3090        lower_index = global_index
3091        telev =  zeros(ncols_new, Float)
3092        local_index = 0
3093        trow = i * cellsize_ratio
3094
3095        for j in range(ncols_new):
3096            tcol = j * cellsize_ratio
3097            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3098
3099            #if dem contains 1 or more NODATA_values set value in
3100            #decimated dem to NODATA_value, else compute decimated
3101            #value using stencil
3102            if sum(sum(equal(tmp, NODATA_value))) > 0:
3103                telev[local_index] = NODATA_value
3104            else:
3105                telev[local_index] = sum(sum(tmp * stencil))
3106
3107            global_index += 1
3108            local_index += 1
3109
3110        upper_index = global_index
3111
3112        elevation[lower_index:upper_index] = telev
3113
3114    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3115
3116    infile.close()
3117    outfile.close()
3118
3119
3120
def sww2asc_obsolete(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            verbose = False,
            origin = None):
    """Read SWW file and convert to Digitial Elevation model format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    if quantity is given, out values from quantity otherwise default to
    elevation

    if timestep (an index) is given, output quantity at that timestep

    if reduction is given use that to reduce quantity over all timesteps.

    NOTE(review): this function lives in the obsolete section of the
    module.  Also, the 'timestep' argument is accepted but never used in
    the body below -- every 2D quantity is reduced over all timesteps.
    """
    from Numeric import array, Float, concatenate, NewAxis, zeros,\
         sometrue
    from utilities.polygon import inside_polygon

    #FIXME: Should be variable
    datum = 'WGS84'
    false_easting = 500000
    false_northing = 10000000

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    ascfile = basename_out + '.asc'
    prjfile = basename_out + '.prj'


    if verbose: print 'Reading from %s' %swwfile
    #Read sww file
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    ymin = min(y); ymax = max(y)
    xmin = min(x); xmax = max(x)

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        #Caller-supplied georeference: (zone, xllcorner, yllcorner)
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]


    #Get quantity and reduce if applicable
    if verbose: print 'Reading quantity %s' %quantity

    if quantity.lower() == 'depth':
        #Depth is derived: stage minus elevation
        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
    else:
        q = fid.variables[quantity][:]


    if len(q.shape) == 2:
        #Time-dependent quantity: collapse the time axis per point
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    #Now q has dimension: number_of_points

    #Create grid and update xll/yll corner and x,y
    if verbose: print 'Creating grid'
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1

    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    #Shift vertex coordinates so the grid origin is (0, 0)
    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2


    from Numeric import zeros, Float
    grid_points = zeros ( (ncols*nrows, 2), Float )


    #Grid points are laid out row-major, bottom row first (k = i*ncols + j)
    for i in xrange(nrows):
        yg = i*cellsize
        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

            grid_points[k,0] = xg
            grid_points[k,1] = yg

    #Interpolate
    from least_squares import Interpolation


    #FIXME: This should be done with precrop = True, otherwise it'll
    #take forever. With expand_search  set to False, some grid points might
    #miss out....

    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
                           precrop = False, expand_search = True,
                           verbose = verbose)

    #Interpolate using quantity values
    if verbose: print 'Interpolating'
    grid_values = interp.interpolate(q).flat

    #Write
    #Write prj file
    if verbose: print 'Writing %s' %prjfile
    prjid = open(prjfile, 'w')
    prjid.write('Projection    %s\n' %'UTM')
    prjid.write('Zone          %d\n' %zone)
    prjid.write('Datum         %s\n' %datum)
    prjid.write('Zunits        NO\n')
    prjid.write('Units         METERS\n')
    prjid.write('Spheroid      %s\n' %datum)
    prjid.write('Xshift        %d\n' %false_easting)
    prjid.write('Yshift        %d\n' %false_northing)
    prjid.write('Parameters\n')
    prjid.close()



    if verbose: print 'Writing %s' %ascfile
    NODATA_value = -9999

    ascid = open(ascfile, 'w')

    ascid.write('ncols         %d\n' %ncols)
    ascid.write('nrows         %d\n' %nrows)
    ascid.write('xllcorner     %d\n' %newxllcorner)
    ascid.write('yllcorner     %d\n' %newyllcorner)
    ascid.write('cellsize      %f\n' %cellsize)
    ascid.write('NODATA_value  %d\n' %NODATA_value)


    #Get bounding polygon from mesh
    P = interp.mesh.get_boundary_polygon()
    inside_indices = inside_polygon(grid_points, P)

    #asc rows run top-down, hence the (nrows-i-1) flip below
    for i in range(nrows):
        if verbose and i%((nrows+10)/10)==0:
            print 'Doing row %d of %d' %(i, nrows)

        for j in range(ncols):
            index = (nrows-i-1)*ncols+j

            if sometrue(inside_indices == index):
                ascid.write('%f ' %grid_values[index])
            else:
                ascid.write('%d ' %NODATA_value)

        ascid.write('\n')

    #Close
    ascid.close()
    fid.close()
3335
3336#********************
3337#*** END OF OBSOLETE FUNCTIONS
3338#***************
3339
3340
3341def tsh2sww(filename, verbose=False): #test_tsh2sww
3342    """
3343    to check if a tsh/msh file 'looks' good.
3344    """
3345
3346    #FIXME Move to data_manager
3347    from shallow_water import Domain
3348    from pmesh2domain import pmesh_to_domain_instance
3349    import time, os
3350    from data_manager import get_dataobject
3351    from os import sep, path
3352    from util import mean
3353
3354    if verbose == True:print 'Creating domain from', filename
3355    domain = pmesh_to_domain_instance(filename, Domain)
3356    if verbose == True:print "Number of triangles = ", len(domain)
3357
3358    domain.smooth = True
3359    domain.format = 'sww'   #Native netcdf visualisation format
3360    file_path, filename = path.split(filename)
3361    filename, ext = path.splitext(filename)
3362    domain.filename = filename
3363    domain.reduction = mean
3364    if verbose == True:print "file_path",file_path
3365    if file_path == "":file_path = "."
3366    domain.set_datadir(file_path)
3367
3368    if verbose == True:
3369        print "Output written to " + domain.get_datadir() + sep + \
3370              domain.filename + "." + domain.format
3371    sww = get_dataobject(domain)
3372    sww.store_connectivity()
3373    sww.store_timestep('stage')
3374
3375
def asc_csiro2sww(bath_dir,
                  elevation_dir,
                  ucur_dir,
                  vcur_dir,
                  sww_file,
                  minlat = None, maxlat = None,
                  minlon = None, maxlon = None,
                  zscale=1,
                  mean_stage = 0,
                  fail_on_NaN = True,
                  elevation_NaN_filler = 0,
                  bath_prefix='ba',
                  elevation_prefix='el',
                  verbose=False):
    """
    Produce an sww boundary file, from esri ascii data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    assume:
    All files are in esri ascii format

    4 types of information
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions
    The metadata of all the files is the same
    Each type is in a seperate directory
    One bath file with extention .000
    The time period is less than 24hrs and uniform.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from coordinate_transforms.redfearn import redfearn

    precision = Float # So if we want to change the precision its done here

    # go in to the bath dir and load the only file,
    bath_files = os.listdir(bath_dir)
    #print "bath_files",bath_files

    #fixme: make more general?
    bath_file = bath_files[0]
    bath_dir_file =  bath_dir + os.sep + bath_file
    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
    #print "bath_metadata",bath_metadata
    #print "bath_grid",bath_grid

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    base_start = bath_file[-12:]

    #go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
                         + base_start
    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
    #print "elevation_dir_file",elevation_dir_file
    #print "elevation_grid", elevation_grid

    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)

    # the first elevation file should be the
    # file with the same base name as the bath data
    #print "elevation_files[0]",elevation_files[0]
    #print "'el' + base_start",'el' + base_start
    assert elevation_files[0] == 'el' + base_start

    #assert bath_metadata == elevation_metadata



    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    #number_of_times = len(os.listdir(elevation_dir))
    #number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    # NOTE(review): grid coordinates are derived from the bath file's
    # llcorner and cellsize; lat/lon spacing is assumed uniform.
    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
                  for x in range(number_of_longitudes)]
    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
                 for y in range(number_of_latitudes)]

     # reverse order of lat, so the fist lat represents the first grid row
    latitudes.reverse()

    #print "latitudes - before _get_min_max_indexes",latitudes
    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
                                                 minlat=minlat, maxlat=maxlat,
                                                 minlon=minlon, maxlon=maxlon)


    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
    #print "bath_grid",bath_grid
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
    #print "number_of_points",number_of_points

    #Work out the times
    if len(elevation_files) > 1:
        # Assume: The time period is less than 24hrs.
        # The last 3 characters of each file name encode the hour.
        time_period = (int(elevation_files[1][-3:]) - \
                      int(elevation_files[0][-3:]))*60*60
        times = [x*time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]
    #print "times", times
    #print "number_of_latitudes", number_of_latitudes
    #print "number_of_longitudes", number_of_longitudes
    #print "number_of_times", number_of_times
    #print "latitudes", latitudes
    #print "longitudes", longitudes


    if verbose:
        print '------------------------------------------------'
        print 'Statistics:'
        print '  Extent (lat/lon):'
        print '    lat in [%f, %f], len(lat) == %d'\
              %(min(latitudes), max(latitudes),
                len(latitudes))
        print '    lon in [%f, %f], len(lon) == %d'\
              %(min(longitudes), max(longitudes),
                len(longitudes))
        print '    t in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))

    ######### WRITE THE SWW FILE #############
    # NetCDF file definition
    outfile = NetCDFFile(sww_file, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'


    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    #Start time in seconds since the epoch (midnight 1/1/1970)
    outfile.starttime = starttime = times[0]


    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)

    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    #Store
    # NOTE(review): redfearn is already imported at the top of this
    # function; this second import is redundant but harmless.
    from coordinate_transforms.redfearn import redfearn
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print 'Making triangular grid'
    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0],longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat,lon)

            msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1


    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            #Note, this is different to the ferrit2sww code
            #since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) #Upper element
            volumes.append([v4,v2,v3]) #Lower element

    volumes = array(volumes)

    geo_ref = Geo_reference(refzone,min(x),min(y))
    geo_ref.write_NetCDF(outfile)

    # This will put the geo ref in the middle
    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)


    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (/lon):'
        print '    x in [%f, %f], len(lat) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(lon) == %d'\
              %(min(y), max(y),
                len(y))
        print 'geo_ref: ',geo_ref

    z = resize(bath_grid,outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

    # do this to create an ok sww file.
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z
    # put the next line up in the code after outfile.order = 1
    #number_of_times = 1

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
           _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])

        #print "elevation_grid",elevation_grid
        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
        #print "elevation_grid",elevation_grid
        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue (missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                #NOTE(review): raising a bare string is deprecated (and
                #invalid in modern Python); consider raise Exception(msg)
                raise msg
            else:
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler


        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()
3683
def _get_min_max_indexes(latitudes,longitudes,
                        minlat=None, maxlat=None,
                        minlon=None, maxlon=None):
    """
    return max, min indexes (for slicing) of the lat and long arrays to cover the area
    specified with min/max lat/long

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    inputed max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    assume latitudess & longitudes are sorted,
    long - from low to high (west to east, eg 148 - 151)
    lat - from high to low (north to south, eg -35 - -38)

    Returns (lat_min_index, lat_max_index, lon_min_index, lon_max_index)
    where the max indexes are exclusive slice bounds.
    """

    # Work on an ascending copy of the latitudes so searchsorted can be
    # used and the caller's list is never mutated (the original code
    # reversed the list in place and reversed it back afterwards).
    ascending_lats = latitudes[::-1]
    largest_lat_index = len(latitudes)-1

    #Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        # -1 so the cell just below minlat is included; clamp at 0
        lat_min_index = searchsorted(ascending_lats, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0

    if maxlat is None:
        lat_max_index = largest_lat_index #len(latitudes)
    else:
        lat_max_index = searchsorted(ascending_lats, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)

    # Map indexes found in the ascending copy back to the original
    # (descending) latitude order
    lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
                                   largest_lat_index - lat_min_index
    lat_max_index = lat_max_index + 1 # taking into account how slicing works
    lon_max_index = lon_max_index + 1 # taking into account how slicing works

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3740
3741
def _read_asc(filename, verbose=False):
    """Read esri file from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Returns a tuple (metadata, grid) where metadata is a dictionary
    with keys xllcorner, yllcorner, cellsize and NODATA_value, and
    grid is an nrows x ncols array of floats (the first text row
    becomes grid row 0).
    """

    datafile = open(filename)

    # Prints use the parenthesised single-argument form so the output
    # is identical under both Python 2 and 3.
    if verbose: print('Reading DEM from %s' %(filename))
    lines = datafile.readlines()
    datafile.close()

    if verbose: print('Got %d  lines' % len(lines))

    # Header: six '<keyword> <value>' lines in fixed order
    ncols = int(lines.pop(0).split()[1].strip())
    nrows = int(lines.pop(0).split()[1].strip())
    xllcorner = float(lines.pop(0).split()[1].strip())
    yllcorner = float(lines.pop(0).split()[1].strip())
    cellsize = float(lines.pop(0).split()[1].strip())
    NODATA_value = float(lines.pop(0).split()[1].strip())

    assert len(lines) == nrows

    # Store data; each remaining line is one grid row.
    # (Removed unused locals 'n' and the unused enumerate index.)
    grid = []

    for line in lines:
        cells = line.split()
        assert len(cells) == ncols
        grid.append(array([float(x) for x in cells]))
    grid = array(grid)

    return {'xllcorner':xllcorner,
            'yllcorner':yllcorner,
            'cellsize':cellsize,
            'NODATA_value':NODATA_value}, grid
Note: See TracBrowser for help on using the repository browser.