source: inundation/pyvolution/data_manager.py @ 1992

Last change on this file since 1992 was 1992, checked in by duncan, 18 years ago

adding code to convert gippsland to sww

File size: 88.7 KB
Line 
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST boundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simulation using pyvolution
48
49"""
50
51from Numeric import concatenate, array, Float
52
53from coordinate_transforms.geo_reference import Geo_reference
54
def make_filename(s):
    """Transform argument string into a suitable filename.

    Strips surrounding whitespace, turns blanks into underscores,
    drops parentheses and collapses double underscores (single pass).
    """
    result = s.strip().replace(' ', '_')
    for unwanted in ('(', ')'):
        result = result.replace(unwanted, '')
    return result.replace('__', '_')
66
67
68def check_dir(path, verbose=None):
69    """Check that specified path exists.
70    If path does not exist it will be created if possible
71
72    USAGE:
73       checkdir(path, verbose):
74
75    ARGUMENTS:
76        path -- Directory
77        verbose -- Flag verbose output (default: None)
78
79    RETURN VALUE:
80        Verified path including trailing separator
81
82    """
83
84    import os, sys
85    import os.path
86
87    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
88        unix = 0
89    else:
90        unix = 1
91
92
93    if path[-1] != os.sep:
94        path = path + os.sep  # Add separator for directories
95
96    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
97    if not (os.access(path,os.R_OK and os.W_OK) or path == ''):
98        try:
99            exitcode=os.mkdir(path)
100
101            # Change access rights if possible
102            #
103            if unix:
104                exitcode=os.system('chmod 775 '+path)
105            else:
106                pass  # FIXME: What about acces rights under Windows?
107
108            if verbose: print 'MESSAGE: Directory', path, 'created.'
109
110        except:
111            print 'WARNING: Directory', path, 'could not be created.'
112            if unix:
113                path = '/tmp/'
114            else:
115                path = 'C:'
116
117            print 'Using directory %s instead' %path
118
119    return(path)
120
121
122
123def del_dir(path):
124    """Recursively delete directory path and all its contents
125    """
126
127    import os
128
129    if os.path.isdir(path):
130        for file in os.listdir(path):
131            X = os.path.join(path, file)
132
133
134            if os.path.isdir(X) and not os.path.islink(X):
135                del_dir(X)
136            else:
137                try:
138                    os.remove(X)
139                except:
140                    print "Could not remove file %s" %X
141
142        os.rmdir(path)
143
144
145
def create_filename(datadir, filename, format, size=None, time=None):
    """Build an output filename of the form
    <datadir>/<filename>[_size<size>][_time<time>].<format>
    """

    import os
    #from config import data_dir

    pieces = [check_dir(datadir) + filename]

    if size is not None:
        pieces.append('_size%d' %size)

    if time is not None:
        pieces.append('_time%.2f' %time)

    pieces.append('.' + format)
    return ''.join(pieces)
161
162
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from config import data_dir

    #check_dir already returns the path with a trailing separator,
    #so do not append another os.sep (the old code produced '//')
    directory = check_dir(datadir)

    #BUGFIX: match the naming scheme actually used by create_filename
    #('_size%d'); the old pattern '_size=%d' could never match files
    #written by this module
    pattern = directory + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)
176
177
178
179#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        """Record domain bookkeeping and derive the output filename.

        mode must be one of 'r' (read), 'w' (write) or 'a' (append).
        """
        msg = '''Mode %s must be either:''' %mode +\
              '''   'w' (write)'''+\
              '''   'r' (read)''' +\
              '''   'a' (append)'''
        assert mode in ['r', 'w', 'a'], msg

        #Create filename
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))

        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0                       #current output timestep index
        self.number_of_volumes = len(domain)    #number of triangles
        self.domain = domain


        #FIXME: Should we have a general set_precision function?
206
207
208
209#Class for storing output to e.g. visualisation
210class Data_format_sww(Data_format):
211    """Interface to native NetCDF format (.sww) for storing model output
212
213    There are two kinds of data
214
215    1: Constant data: Vertex coordinates and field values. Stored once
216    2: Variable data: Conserved quantities. Stored once per timestep.
217
218    All data is assumed to reside at vertex locations.
219    """
220
221
222    def __init__(self, domain, mode = 'w',\
223                 max_size = 2000000000,
224                 recursion = False):
225        from Scientific.IO.NetCDF import NetCDFFile
226        from Numeric import Int, Float, Float32
227
228        self.precision = Float32 #Use single precision
229        if hasattr(domain, 'max_size'):
230            self.max_size = domain.max_size #file size max is 2Gig
231        else:
232            self.max_size = max_size
233        self.recursion = recursion
234        self.mode = mode
235
236        Data_format.__init__(self, domain, 'sww', mode)
237
238
239        # NetCDF file definition
240        fid = NetCDFFile(self.filename, mode)
241
242        if mode == 'w':
243
244            #Create new file
245            fid.institution = 'Geoscience Australia'
246            fid.description = 'Output from pyvolution suitable for plotting'
247
248            if domain.smooth:
249                fid.smoothing = 'Yes'
250            else:
251                fid.smoothing = 'No'
252
253            fid.order = domain.default_order
254           
255            if hasattr(domain, 'texture'):                 
256                fid.texture = domain.texture
257            #else:                         
258            #    fid.texture = 'None'       
259
260            #Reference point
261            #Start time in seconds since the epoch (midnight 1/1/1970)
262            #FIXME: Use Georef
263            fid.starttime = domain.starttime
264
265            # dimension definitions
266            fid.createDimension('number_of_volumes', self.number_of_volumes)
267            fid.createDimension('number_of_vertices', 3)
268
269            if domain.smooth is True:
270                fid.createDimension('number_of_points', len(domain.vertexlist))
271            else:
272                fid.createDimension('number_of_points', 3*self.number_of_volumes)
273
274            fid.createDimension('number_of_timesteps', None) #extensible
275
276            # variable definitions
277            fid.createVariable('x', self.precision, ('number_of_points',))
278            fid.createVariable('y', self.precision, ('number_of_points',))
279            fid.createVariable('elevation', self.precision, ('number_of_points',))
280            if domain.geo_reference is not None:
281                domain.geo_reference.write_NetCDF(fid)
282               
283            #FIXME: Backwards compatibility
284            fid.createVariable('z', self.precision, ('number_of_points',))
285            #################################
286
287            fid.createVariable('volumes', Int, ('number_of_volumes',
288                                                'number_of_vertices'))
289
290            fid.createVariable('time', self.precision,
291                               ('number_of_timesteps',))
292
293            fid.createVariable('stage', self.precision,
294                               ('number_of_timesteps',
295                                'number_of_points'))
296
297            fid.createVariable('xmomentum', self.precision,
298                               ('number_of_timesteps',
299                                'number_of_points'))
300
301            fid.createVariable('ymomentum', self.precision,
302                               ('number_of_timesteps',
303                                'number_of_points'))
304
305        #Close
306        fid.close()
307
308
309    def store_connectivity(self):
310        """Specialisation of store_connectivity for net CDF format
311
312        Writes x,y,z coordinates of triangles constituting
313        the bed elevation.
314        """
315
316        from Scientific.IO.NetCDF import NetCDFFile
317
318        from Numeric import concatenate, Int
319
320        domain = self.domain
321
322        #Get NetCDF
323        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
324
325        # Get the variables
326        x = fid.variables['x']
327        y = fid.variables['y']
328        z = fid.variables['elevation']
329
330        volumes = fid.variables['volumes']
331
332        # Get X, Y and bed elevation Z
333        Q = domain.quantities['elevation']
334        X,Y,Z,V = Q.get_vertex_values(xy=True,
335                                      precision = self.precision)
336
337
338
339        x[:] = X.astype(self.precision)
340        y[:] = Y.astype(self.precision)
341        z[:] = Z.astype(self.precision)
342
343        #FIXME: Backwards compatibility
344        z = fid.variables['z']
345        z[:] = Z.astype(self.precision)
346        ################################
347
348        volumes[:] = V.astype(volumes.typecode())
349
350        #Close
351        fid.close()
352
353    def store_timestep(self, names):
354        """Store time and named quantities to file
355        """
356        from Scientific.IO.NetCDF import NetCDFFile
357        import types
358        from time import sleep
359        from os import stat
360
361        minimum_allowed_depth = 0.001
362        #minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
363        from Numeric import choose
364
365        #Get NetCDF
366        retries = 0
367        file_open = False
368        while not file_open and retries < 10:
369            try:
370                fid = NetCDFFile(self.filename, 'a') #Open existing file
371            except IOError:
372                #This could happen if someone was reading the file.
373                #In that case, wait a while and try again
374                msg = 'Warning (store_timestep): File %s could not be opened'\
375                      %self.filename
376                msg += ' - trying step %s again' %self.domain.time
377                print msg
378                retries += 1
379                sleep(1)
380            else:
381                file_open = True
382
383        if not file_open:
384            msg = 'File %s could not be opened for append' %self.filename
385            raise msg
386
387
388
389        #Check to see if the file is already too big:
390        time = fid.variables['time']
391        i = len(time)+1
392        file_size = stat(self.filename)[6]
393        file_size_increase =  file_size/i
394        if file_size + file_size_increase > self.max_size*(2**self.recursion):
395            #in order to get the file name and start time correct,
396            #I change the domian.filename and domain.starttime.
397            #This is the only way to do this without changing
398            #other modules (I think).
399
400            #write a filename addon that won't break swollens reader
401            #(10.sww is bad)
402            filename_ext = '_time_%s'%self.domain.time
403            filename_ext = filename_ext.replace('.', '_')
404            #remember the old filename, then give domain a
405            #name with the extension
406            old_domain_filename = self.domain.filename
407            if not self.recursion:
408                self.domain.filename = self.domain.filename+filename_ext
409
410            #change the domain starttime to the current time
411            old_domain_starttime = self.domain.starttime
412            self.domain.starttime = self.domain.time
413
414            #build a new data_structure.
415            next_data_structure=\
416                Data_format_sww(self.domain, mode=self.mode,\
417                                max_size = self.max_size,\
418                                recursion = self.recursion+1)
419            if not self.recursion:
420                print '    file_size = %s'%file_size
421                print '    saving file to %s'%next_data_structure.filename
422            #set up the new data_structure
423            self.domain.writer = next_data_structure
424
425            #FIXME - could be cleaner to use domain.store_timestep etc.
426            next_data_structure.store_connectivity()
427            next_data_structure.store_timestep(names)
428            fid.sync()
429            fid.close()
430
431            #restore the old starttime and filename
432            self.domain.starttime = old_domain_starttime
433            self.domain.filename = old_domain_filename
434        else:
435            self.recursion = False
436            domain = self.domain
437
438            # Get the variables
439            time = fid.variables['time']
440            stage = fid.variables['stage']
441            xmomentum = fid.variables['xmomentum']
442            ymomentum = fid.variables['ymomentum']
443            i = len(time)
444
445            #Store time
446            time[i] = self.domain.time
447
448
449            if type(names) not in [types.ListType, types.TupleType]:
450                names = [names]
451
452            for name in names:
453                # Get quantity
454                Q = domain.quantities[name]
455                A,V = Q.get_vertex_values(xy = False,
456                                          precision = self.precision)
457
458                #FIXME: Make this general (see below)
459                if name == 'stage':
460                    z = fid.variables['elevation']
461                    #print z[:]
462                    #print A-z[:]
463                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
464                    stage[i,:] = A.astype(self.precision)
465                elif name == 'xmomentum':
466                    xmomentum[i,:] = A.astype(self.precision)
467                elif name == 'ymomentum':
468                    ymomentum[i,:] = A.astype(self.precision)
469
470                #As in....
471                #eval( name + '[i,:] = A.astype(self.precision)' )
472                #FIXME: But we need a UNIT test for that before refactoring
473
474
475
476            #Flush and close
477            fid.sync()
478            fid.close()
479
480
481
482#Class for handling checkpoints data
483class Data_format_cpt(Data_format):
484    """Interface to native NetCDF format (.cpt)
485    """
486
487
488    def __init__(self, domain, mode = 'w'):
489        from Scientific.IO.NetCDF import NetCDFFile
490        from Numeric import Int, Float, Float
491
492        self.precision = Float #Use full precision
493
494        Data_format.__init__(self, domain, 'sww', mode)
495
496
497        # NetCDF file definition
498        fid = NetCDFFile(self.filename, mode)
499
500        if mode == 'w':
501            #Create new file
502            fid.institution = 'Geoscience Australia'
503            fid.description = 'Checkpoint data'
504            #fid.smooth = domain.smooth
505            fid.order = domain.default_order
506
507            # dimension definitions
508            fid.createDimension('number_of_volumes', self.number_of_volumes)
509            fid.createDimension('number_of_vertices', 3)
510
511            #Store info at all vertices (no smoothing)
512            fid.createDimension('number_of_points', 3*self.number_of_volumes)
513            fid.createDimension('number_of_timesteps', None) #extensible
514
515            # variable definitions
516
517            #Mesh
518            fid.createVariable('x', self.precision, ('number_of_points',))
519            fid.createVariable('y', self.precision, ('number_of_points',))
520
521
522            fid.createVariable('volumes', Int, ('number_of_volumes',
523                                                'number_of_vertices'))
524
525            fid.createVariable('time', self.precision,
526                               ('number_of_timesteps',))
527
528            #Allocate space for all quantities
529            for name in domain.quantities.keys():
530                fid.createVariable(name, self.precision,
531                                   ('number_of_timesteps',
532                                    'number_of_points'))
533
534        #Close
535        fid.close()
536
537
538    def store_checkpoint(self):
539        """
540        Write x,y coordinates of triangles.
541        Write connectivity (
542        constituting
543        the bed elevation.
544        """
545
546        from Scientific.IO.NetCDF import NetCDFFile
547
548        from Numeric import concatenate
549
550        domain = self.domain
551
552        #Get NetCDF
553        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append
554
555        # Get the variables
556        x = fid.variables['x']
557        y = fid.variables['y']
558
559        volumes = fid.variables['volumes']
560
561        # Get X, Y and bed elevation Z
562        Q = domain.quantities['elevation']
563        X,Y,Z,V = Q.get_vertex_values(xy=True,
564                                      precision = self.precision)
565
566
567
568        x[:] = X.astype(self.precision)
569        y[:] = Y.astype(self.precision)
570        z[:] = Z.astype(self.precision)
571
572        volumes[:] = V
573
574        #Close
575        fid.close()
576
577
578    def store_timestep(self, name):
579        """Store time and named quantity to file
580        """
581        from Scientific.IO.NetCDF import NetCDFFile
582        from time import sleep
583
584        #Get NetCDF
585        retries = 0
586        file_open = False
587        while not file_open and retries < 10:
588            try:
589                fid = NetCDFFile(self.filename, 'a') #Open existing file
590            except IOError:
591                #This could happen if someone was reading the file.
592                #In that case, wait a while and try again
593                msg = 'Warning (store_timestep): File %s could not be opened'\
594                          %self.filename
595                msg += ' - trying again'
596                print msg
597                retries += 1
598                sleep(1)
599            else:
600                file_open = True
601
602            if not file_open:
603                msg = 'File %s could not be opened for append' %self.filename
604            raise msg
605
606
607        domain = self.domain
608
609        # Get the variables
610        time = fid.variables['time']
611        stage = fid.variables['stage']
612        i = len(time)
613
614        #Store stage
615        time[i] = self.domain.time
616
617        # Get quantity
618        Q = domain.quantities[name]
619        A,V = Q.get_vertex_values(xy=False,
620                                  precision = self.precision)
621
622        stage[i,:] = A.astype(self.precision)
623
624        #Flush and close
625        fid.sync()
626        fid.close()
627
628
629
630
631
632#Function for storing xya output
633#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Writer for the (old) ASCII xya format.

    store_all writes one line per vertex containing x, y and the three
    attributes bed elevation, stage and friction.
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        #Number of output points depends on smoothing: shared vertices
        #when smooth, three independent vertices per triangle otherwise
        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        #Header line: point count, attribute count and a column legend
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        # NOTE(review): assumes field_values index 0 is elevation and
        # index 1 is friction - confirm against the Domain class
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        #map(None, ...) zips the sequences (Python 2 idiom; unlike zip
        #it pads with None if lengths differ)
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file

        Not implemented for the xya format - intentionally a no-op.
        """
        pass
703
704
705#Auxiliary
#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number elements.
       triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    #Open in text mode - the obj format is plain ASCII ('wb' was wrong
    #here since only text is written)
    outfile = open(FN, 'w')
    try:
        outfile.write("# Triangulation as an obj file\n")

        M, N = x.shape
        assert N==3  #Assuming three vertices per element

        #One 'v' (vertex) record per vertex of every element
        for i in range(M):
            for j in range(N):
                outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

        #One 'f' (face) record per element; obj indices are 1-based
        for i in range(M):
            base = i*N
            outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))
    finally:
        #Ensure the file handle is released even if writing fails
        outfile.close()
743
744
745#########################################################
746#Conversion routines
747########################################################
748
749def sww2obj(basefilename, size):
750    """Convert netcdf based data output to obj
751    """
752    from Scientific.IO.NetCDF import NetCDFFile
753
754    from Numeric import Float, zeros
755
756    #Get NetCDF
757    FN = create_filename('.', basefilename, 'sww', size)
758    print 'Reading from ', FN
759    fid = NetCDFFile(FN, 'r')  #Open existing file for read
760
761
762    # Get the variables
763    x = fid.variables['x']
764    y = fid.variables['y']
765    z = fid.variables['elevation']
766    time = fid.variables['time']
767    stage = fid.variables['stage']
768
769    M = size  #Number of lines
770    xx = zeros((M,3), Float)
771    yy = zeros((M,3), Float)
772    zz = zeros((M,3), Float)
773
774    for i in range(M):
775        for j in range(3):
776            xx[i,j] = x[i+j*M]
777            yy[i,j] = y[i+j*M]
778            zz[i,j] = z[i+j*M]
779
780    #Write obj for bathymetry
781    FN = create_filename('.', basefilename, 'obj', size)
782    write_obj(FN,xx,yy,zz)
783
784
785    #Now read all the data with variable information, combine with
786    #x,y info and store as obj
787
788    for k in range(len(time)):
789        t = time[k]
790        print 'Processing timestep %f' %t
791
792        for i in range(M):
793            for j in range(3):
794                zz[i,j] = stage[k,i+j*M]
795
796
797        #Write obj for variable data
798        #FN = create_filename(basefilename, 'obj', size, time=t)
799        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
800        write_obj(FN,xx,yy,zz)
801
802
803def dat2obj(basefilename):
804    """Convert line based data output to obj
805    FIXME: Obsolete?
806    """
807
808    import glob, os
809    from config import data_dir
810
811
812    #Get bathymetry and x,y's
813    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()
814
815    from Numeric import zeros, Float
816
817    M = len(lines)  #Number of lines
818    x = zeros((M,3), Float)
819    y = zeros((M,3), Float)
820    z = zeros((M,3), Float)
821
822    ##i = 0
823    for i, line in enumerate(lines):
824        tokens = line.split()
825        values = map(float,tokens)
826
827        for j in range(3):
828            x[i,j] = values[j*3]
829            y[i,j] = values[j*3+1]
830            z[i,j] = values[j*3+2]
831
832        ##i += 1
833
834
835    #Write obj for bathymetry
836    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)
837
838
839    #Now read all the data files with variable information, combine with
840    #x,y info
841    #and store as obj
842
843    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')
844
845    for filename in files:
846        print 'Processing %s' % filename
847
848        lines = open(data_dir+os.sep+filename,'r').readlines()
849        assert len(lines) == M
850        root, ext = os.path.splitext(filename)
851
852        #Get time from filename
853        i0 = filename.find('_time=')
854        if i0 == -1:
855            #Skip bathymetry file
856            continue
857
858        i0 += 6  #Position where time starts
859        i1 = filename.find('.dat')
860
861        if i1 > i0:
862            t = float(filename[i0:i1])
863        else:
864            raise 'Hmmmm'
865
866
867
868        ##i = 0
869        for i, line in enumerate(lines):
870            tokens = line.split()
871            values = map(float,tokens)
872
873            for j in range(3):
874                z[i,j] = values[j]
875
876            ##i += 1
877
878        #Write obj for variable data
879        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
880
881
def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    nettcdf file filename2

    All dimensions and variables are copied; only the time-dependent
    'time' and 'stage' variables are thinned by the given slice.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions (must exist before variables can reference them)
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    #Declare every variable with the same type and dimensions
    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass  # time-dependent - handled below
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)  # default: copy through to the final timestep

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
929
930
931#Get data objects
#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename

    Maps domain.format (e.g. 'sww', 'cpt', 'xya') to the matching
    Data_format_* class defined in this module.
    """

    #Look the class up by name instead of eval'ing a constructed string
    #(eval would execute arbitrary code if format were ever
    #user-supplied, and a missing class now fails with a clear KeyError)
    cls = globals()['Data_format_%s' %domain.format]
    return cls(domain, mode)
938
939def xya2pts(basename_in, basename_out=None, verbose=False,
940            #easting_min=None, easting_max=None,
941            #northing_min=None, northing_max=None,
942            stride = 1,           
943            attribute_name = 'elevation',
944            z_func = None):
945    """Read points data from ascii (.xya)
946
947    Example:
948
949              x(m)        y(m)        z(m)
950         0.00000e+00  0.00000e+00  1.3535000e-01
951         0.00000e+00  1.40000e-02  1.3535000e-01
952
953   
954
955    Convert to NetCDF pts format which is
956
957    points:  (Nx2) Float array
958    elevation: N Float array
959
960    Only lines that contain three numeric values are processed
961
962    If z_func is specified, it will be applied to the third field
963    """
964
965    import os 
966    #from Scientific.IO.NetCDF import NetCDFFile
967    from Numeric import Float, arrayrange, concatenate
968
969    root, ext = os.path.splitext(basename_in)
970
971    if ext == '': ext = '.xya'
972
973    #Get NetCDF
974    infile = open(root + ext, 'r')  #Open existing xya file for read
975
976    if verbose: print 'Reading xya points from %s' %(root + ext)
977
978    points = []
979    attribute = []
980    for i, line in enumerate(infile.readlines()):
981
982        if i % stride != 0: continue
983       
984        fields = line.split()
985
986        try:
987            assert len(fields) == 3
988        except:   
989            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line) 
990
991        try:
992            x = float( fields[0] )
993            y = float( fields[1] )
994            z = float( fields[2] )           
995        except:
996            continue
997
998        points.append( [x, y] )
999
1000        if callable(z_func):
1001            attribute.append(z_func(z))           
1002        else:   
1003            attribute.append(z)
1004
1005
1006    #Get output file
1007    if basename_out == None:
1008        ptsname = root + '.pts'
1009    else:
1010        ptsname = basename_out + '.pts'
1011
1012    if verbose: print 'Store to NetCDF file %s' %ptsname
1013    write_ptsfile(ptsname, points, attribute, attribute_name)
1014
1015
def write_ptsfile(ptsname, points, attribute, attribute_name = None):
    """Write points and associated attribute to pts (NetCDF) format

    ptsname: name of output file (typically ending in .pts)
    points: sequence of (x, y) pairs
    attribute: sequence of attribute values, one per point
    attribute_name: name under which the attribute is stored
                    (defaults to 'attribute')
    """

    from Numeric import Float
    from Scientific.IO.NetCDF import NetCDFFile

    if attribute_name is None:
        attribute_name = 'attribute'

    #Open NetCDF file for writing and attach meta data
    outfile = NetCDFFile(ptsname, 'w')
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'

    #Attach a default georeference
    from coordinate_transforms.geo_reference import Geo_reference
    Geo_reference().write_NetCDF(outfile)

    #Dimensions: N points in two dimensions
    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    #Variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    #Store data and close
    outfile.variables['points'][:, :] = points
    outfile.variables[attribute_name][:] = attribute

    outfile.close()
1060   
1061
1062def dem2pts(basename_in, basename_out=None, verbose=False,
1063            easting_min=None, easting_max=None,
1064            northing_min=None, northing_max=None):
1065    """Read Digitial Elevation model from the following NetCDF format (.dem)
1066
1067    Example:
1068
1069    ncols         3121
1070    nrows         1800
1071    xllcorner     722000
1072    yllcorner     5893000
1073    cellsize      25
1074    NODATA_value  -9999
1075    138.3698 137.4194 136.5062 135.5558 ..........
1076
1077    Convert to NetCDF pts format which is
1078
1079    points:  (Nx2) Float array
1080    elevation: N Float array
1081    """
1082
1083    #FIXME: Can this be written feasibly using write_pts?
1084
1085    import os
1086    from Scientific.IO.NetCDF import NetCDFFile
1087    from Numeric import Float, zeros, reshape
1088
1089    root = basename_in
1090
1091    #Get NetCDF
1092    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read
1093
1094    if verbose: print 'Reading DEM from %s' %(root + '.dem')
1095
1096    ncols = infile.ncols[0]
1097    nrows = infile.nrows[0]
1098    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
1099    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
1100    cellsize = infile.cellsize[0]
1101    NODATA_value = infile.NODATA_value[0]
1102    dem_elevation = infile.variables['elevation']
1103
1104    zone = infile.zone[0]
1105    false_easting = infile.false_easting[0]
1106    false_northing = infile.false_northing[0]
1107
1108    #Text strings
1109    projection = infile.projection
1110    datum = infile.datum
1111    units = infile.units
1112
1113
1114    #Get output file
1115    if basename_out == None:
1116        ptsname = root + '.pts'
1117    else:
1118        ptsname = basename_out + '.pts'
1119
1120    if verbose: print 'Store to NetCDF file %s' %ptsname
1121    # NetCDF file definition
1122    outfile = NetCDFFile(ptsname, 'w')
1123
1124    #Create new file
1125    outfile.institution = 'Geoscience Australia'
1126    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
1127                      'of spatial point data'
1128    #assign default values
1129    if easting_min is None: easting_min = xllcorner
1130    if easting_max is None: easting_max = xllcorner + ncols*cellsize
1131    if northing_min is None: northing_min = yllcorner
1132    if northing_max is None: northing_max = yllcorner + nrows*cellsize
1133
1134    #compute offsets to update georeferencing
1135    easting_offset = xllcorner - easting_min
1136    northing_offset = yllcorner - northing_min
1137
1138    #Georeferencing
1139    outfile.zone = zone
1140    outfile.xllcorner = easting_min #Easting of lower left corner
1141    outfile.yllcorner = northing_min #Northing of lower left corner
1142    outfile.false_easting = false_easting
1143    outfile.false_northing = false_northing
1144
1145    outfile.projection = projection
1146    outfile.datum = datum
1147    outfile.units = units
1148
1149
1150    #Grid info (FIXME: probably not going to be used, but heck)
1151    outfile.ncols = ncols
1152    outfile.nrows = nrows
1153
1154
1155    # dimension definitions
1156    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
1157    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
1158    outfile.createDimension('number_of_points', nrows_in_bounding_box*ncols_in_bounding_box)
1159    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1160
1161    # variable definitions
1162    outfile.createVariable('points', Float, ('number_of_points',
1163                                             'number_of_dimensions'))
1164    outfile.createVariable('elevation', Float, ('number_of_points',))
1165
1166    # Get handles to the variables
1167    points = outfile.variables['points']
1168    elevation = outfile.variables['elevation']
1169
1170    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
1171
1172    #Store data
1173    #FIXME: Could perhaps be faster using array operations (Fixed 27/7/05)
1174    global_index = 0
1175    for i in range(nrows):
1176        if verbose and i%((nrows+10)/10)==0:
1177            print 'Processing row %d of %d' %(i, nrows)       
1178       
1179        lower_index = global_index
1180        tpoints = zeros((ncols_in_bounding_box, 2), Float)
1181        telev =  zeros(ncols_in_bounding_box, Float)
1182        local_index = 0
1183
1184        y = (nrows-i)*cellsize + yllcorner
1185        for j in range(ncols):
1186
1187            x = j*cellsize + xllcorner
1188            if easting_min <= x <= easting_max and \
1189               northing_min <= y <= northing_max:
1190                tpoints[local_index, :] = [x-easting_min,y-northing_min]
1191                telev[local_index] = dem_elevation_r[i, j]
1192                global_index += 1
1193                local_index += 1
1194
1195        upper_index = global_index
1196
1197        if upper_index == lower_index + ncols_in_bounding_box:
1198            points[lower_index:upper_index, :] = tpoints
1199            elevation[lower_index:upper_index] = telev
1200
1201    assert global_index == nrows_in_bounding_box*ncols_in_bounding_box, 'index not equal to number of points'
1202
1203    infile.close()
1204    outfile.close()
1205
1206
1207
1208def sww2dem(basename_in, basename_out = None,
1209            quantity = None,
1210            timestep = None,
1211            reduction = None,
1212            cellsize = 10,
1213            easting_min = None,
1214            easting_max = None,
1215            northing_min = None,
1216            northing_max = None,         
1217            verbose = False,
1218            origin = None,
1219            datum = 'WGS84',
1220            format = 'ers'):
1221   
1222    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1223
1224    Example (ASC):
1225
1226    ncols         3121
1227    nrows         1800
1228    xllcorner     722000
1229    yllcorner     5893000
1230    cellsize      25
1231    NODATA_value  -9999
1232    138.3698 137.4194 136.5062 135.5558 ..........
1233
1234    Also write accompanying file with same basename_in but extension .prj
1235    used to fix the UTM zone, datum, false northings and eastings.
1236
1237    The prj format is assumed to be as
1238
1239    Projection    UTM
1240    Zone          56
1241    Datum         WGS84
1242    Zunits        NO
1243    Units         METERS
1244    Spheroid      WGS84
1245    Xshift        0.0000000000
1246    Yshift        10000000.0000000000
1247    Parameters
1248
1249
1250    The parameter quantity must be the name of an existing quantity or
1251    an expression involving existing quantities. The default is
1252    'elevation'.
1253
1254    if timestep (an index) is given, output quantity at that timestep
1255
1256    if reduction is given use that to reduce quantity over all timesteps.
1257
1258    datum
1259   
1260    format can be either 'asc' or 'ers'
1261    """
1262
1263    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1264    from utilities.polygon import inside_polygon
1265    from util import apply_expression_to_dictionary
1266   
1267    msg = 'Format must be either asc or ers'
1268    assert format.lower() in ['asc', 'ers'], msg
1269   
1270    false_easting = 500000
1271    false_northing = 10000000
1272
1273    if quantity is None:
1274        quantity = 'elevation'
1275
1276    if reduction is None:
1277        reduction = max
1278
1279    if basename_out is None:
1280        basename_out = basename_in + '_%s' %quantity
1281 
1282    swwfile = basename_in + '.sww'
1283    demfile = basename_out + '.' + format
1284    # Note the use of a .ers extension is optional (write_ermapper_grid will
1285    # deal with either option
1286
1287    #Read sww file
1288    if verbose: print 'Reading from %s' %swwfile
1289    from Scientific.IO.NetCDF import NetCDFFile
1290    fid = NetCDFFile(swwfile)
1291
1292    #Get extent and reference
1293    x = fid.variables['x'][:]
1294    y = fid.variables['y'][:]
1295    volumes = fid.variables['volumes'][:]
1296
1297    number_of_timesteps = fid.dimensions['number_of_timesteps']
1298    number_of_points = fid.dimensions['number_of_points']
1299    if origin is None:
1300
1301        #Get geo_reference
1302        #sww files don't have to have a geo_ref
1303        try:
1304            geo_reference = Geo_reference(NetCDFObject=fid)
1305        except AttributeError, e:
1306            geo_reference = Geo_reference() #Default georef object
1307
1308        xllcorner = geo_reference.get_xllcorner()
1309        yllcorner = geo_reference.get_yllcorner()
1310        zone = geo_reference.get_zone()
1311    else:
1312        zone = origin[0]
1313        xllcorner = origin[1]
1314        yllcorner = origin[2]
1315
1316
1317
1318    #Get quantity and reduce if applicable
1319    if verbose: print 'Processing quantity %s' %quantity
1320
1321    #Turn NetCDF objects into Numeric arrays
1322    quantity_dict = {}
1323    for name in fid.variables.keys():
1324        quantity_dict[name] = fid.variables[name][:]
1325
1326
1327    q = apply_expression_to_dictionary(quantity, quantity_dict)
1328
1329
1330    if len(q.shape) == 2:
1331        #q has a time component and needs to be reduced along
1332        #the temporal dimension
1333        if verbose: print 'Reducing quantity %s' %quantity
1334        q_reduced = zeros( number_of_points, Float )
1335
1336        for k in range(number_of_points):
1337            q_reduced[k] = reduction( q[:,k] )
1338
1339        q = q_reduced
1340
1341    #Post condition: Now q has dimension: number_of_points
1342    assert len(q.shape) == 1
1343    assert q.shape[0] == number_of_points   
1344   
1345
1346    #Create grid and update xll/yll corner and x,y
1347
1348    #Relative extent
1349    if easting_min is None:
1350        xmin = min(x)
1351    else:
1352        xmin = easting_min - xllcorner
1353
1354    if easting_max is None:
1355        xmax = max(x)
1356    else:
1357        xmax = easting_max - xllcorner
1358       
1359    if northing_min is None:       
1360        ymin = min(y)
1361    else:
1362        ymin = northing_min - yllcorner
1363       
1364    if northing_max is None:       
1365        ymax = max(y)
1366    else:
1367        ymax = northing_max - yllcorner
1368       
1369       
1370   
1371    if verbose: print 'Creating grid'
1372    ncols = int((xmax-xmin)/cellsize)+1
1373    nrows = int((ymax-ymin)/cellsize)+1
1374
1375
1376    #New absolute reference and coordinates
1377    newxllcorner = xmin+xllcorner
1378    newyllcorner = ymin+yllcorner
1379
1380    x = x+xllcorner-newxllcorner
1381    y = y+yllcorner-newyllcorner
1382
1383    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1384    assert len(vertex_points.shape) == 2
1385
1386
1387    from Numeric import zeros, Float
1388    grid_points = zeros ( (ncols*nrows, 2), Float )
1389
1390
1391    for i in xrange(nrows):
1392        if format.lower() == 'asc':
1393            yg = i*cellsize
1394        else:   
1395            #this will flip the order of the y values for ers
1396            yg = (nrows-i)*cellsize
1397           
1398        for j in xrange(ncols):
1399            xg = j*cellsize
1400            k = i*ncols + j
1401
1402            grid_points[k,0] = xg
1403            grid_points[k,1] = yg
1404
1405    #Interpolate
1406    from least_squares import Interpolation
1407   
1408
1409    #FIXME: This should be done with precrop = True (?), otherwise it'll
1410    #take forever. With expand_search  set to False, some grid points might
1411    #miss out.... This will be addressed though Duncan's refactoring of least_squares
1412
1413    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
1414                           precrop = False, expand_search = True,
1415                           verbose = verbose)
1416
1417    #Interpolate using quantity values
1418    if verbose: print 'Interpolating'
1419    grid_values = interp.interpolate(q).flat
1420
1421    if format.lower() == 'ers':
1422        # setup ERS header information
1423        grid_values = reshape(grid_values,(nrows, ncols))   
1424        NODATA_value = 0
1425        header = {}
1426        header['datum'] = '"' + datum + '"'
1427        # FIXME The use of hardwired UTM and zone number needs to be made optional
1428        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
1429        header['projection'] = '"UTM-' + str(zone) + '"'
1430        header['coordinatetype'] = 'EN'
1431        if header['coordinatetype'] == 'LL':
1432            header['longitude'] = str(newxllcorner)
1433            header['latitude'] = str(newyllcorner)
1434        elif header['coordinatetype'] == 'EN':
1435            header['eastings'] = str(newxllcorner)
1436            header['northings'] = str(newyllcorner)
1437        header['nullcellvalue'] = str(NODATA_value)
1438        header['xdimension'] = str(cellsize)
1439        header['ydimension'] = str(cellsize)
1440        header['value'] = '"' + quantity + '"'
1441
1442
1443        #Write
1444        if verbose: print 'Writing %s' %demfile
1445        import ermapper_grids
1446        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
1447
1448        fid.close()   
1449    else:
1450        #Write to Ascii format
1451       
1452        #Write prj file
1453        prjfile = basename_out + '.prj' 
1454       
1455        if verbose: print 'Writing %s' %prjfile
1456        prjid = open(prjfile, 'w')
1457        prjid.write('Projection    %s\n' %'UTM')
1458        prjid.write('Zone          %d\n' %zone)
1459        prjid.write('Datum         %s\n' %datum)
1460        prjid.write('Zunits        NO\n')
1461        prjid.write('Units         METERS\n')
1462        prjid.write('Spheroid      %s\n' %datum)
1463        prjid.write('Xshift        %d\n' %false_easting)
1464        prjid.write('Yshift        %d\n' %false_northing)
1465        prjid.write('Parameters\n')
1466        prjid.close()
1467
1468
1469   
1470        if verbose: print 'Writing %s' %ascfile
1471        NODATA_value = -9999
1472 
1473        ascid = open(demfile, 'w')
1474
1475        ascid.write('ncols         %d\n' %ncols)
1476        ascid.write('nrows         %d\n' %nrows)
1477        ascid.write('xllcorner     %d\n' %newxllcorner)
1478        ascid.write('yllcorner     %d\n' %newyllcorner)
1479        ascid.write('cellsize      %f\n' %cellsize)
1480        ascid.write('NODATA_value  %d\n' %NODATA_value)
1481
1482
1483        #Get bounding polygon from mesh
1484        P = interp.mesh.get_boundary_polygon()
1485        inside_indices = inside_polygon(grid_points, P)
1486
1487        for i in range(nrows):
1488            if verbose and i%((nrows+10)/10)==0:
1489                print 'Doing row %d of %d' %(i, nrows)
1490
1491            for j in range(ncols):
1492                index = (nrows-i-1)*ncols+j
1493
1494                if sometrue(inside_indices == index):
1495                    ascid.write('%f ' %grid_values[index])
1496                else:
1497                    ascid.write('%d ' %NODATA_value)
1498
1499            ascid.write('\n')
1500
1501        #Close
1502        ascid.close()
1503        fid.close()
1504
1505#Backwards compatibility       
1506def sww2asc(basename_in, basename_out = None,
1507            quantity = None,
1508            timestep = None,
1509            reduction = None,
1510            cellsize = 10,
1511            verbose = False,
1512            origin = None):
1513    print 'sww2asc will soon be obsoleted - please use sww2dem'
1514    sww2dem(basename_in,
1515            basename_out = basename_out,
1516            quantity = quantity,
1517            timestep = timestep,
1518            reduction = reduction,
1519            cellsize = cellsize,
1520            verbose = verbose,
1521            origin = origin,
1522            datum = 'WGS84',
1523            format = 'asc')
1524           
1525def sww2ers(basename_in, basename_out = None,
1526            quantity = None,
1527            timestep = None,
1528            reduction = None,
1529            cellsize = 10,
1530            verbose = False,
1531            origin = None,
1532            datum = 'WGS84'):
1533    print 'sww2ers will soon be obsoleted - please use sww2dem'
1534    sww2dem(basename_in, 
1535            basename_out = basename_out,
1536            quantity = quantity,
1537            timestep = timestep,
1538            reduction = reduction,
1539            cellsize = cellsize,
1540            verbose = verbose,
1541            origin = origin,
1542            datum = datum,
1543            format = 'ers')         
1544################################# END COMPATIBILITY ##############         
1545
1546
1547
1548
1549def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1550                                  verbose=False):
1551    """Read Digitial Elevation model from the following ASCII format (.asc)
1552
1553    Example:
1554
1555    ncols         3121
1556    nrows         1800
1557    xllcorner     722000
1558    yllcorner     5893000
1559    cellsize      25
1560    NODATA_value  -9999
1561    138.3698 137.4194 136.5062 135.5558 ..........
1562
1563    Convert basename_in + '.asc' to NetCDF format (.dem)
1564    mimicking the ASCII format closely.
1565
1566
1567    An accompanying file with same basename_in but extension .prj must exist
1568    and is used to fix the UTM zone, datum, false northings and eastings.
1569
1570    The prj format is assumed to be as
1571
1572    Projection    UTM
1573    Zone          56
1574    Datum         WGS84
1575    Zunits        NO
1576    Units         METERS
1577    Spheroid      WGS84
1578    Xshift        0.0000000000
1579    Yshift        10000000.0000000000
1580    Parameters
1581    """
1582
1583    import os
1584    from Scientific.IO.NetCDF import NetCDFFile
1585    from Numeric import Float, array
1586
1587    #root, ext = os.path.splitext(basename_in)
1588    root = basename_in
1589
1590    ###########################################
1591    # Read Meta data
1592    if verbose: print 'Reading METADATA from %s' %root + '.prj'
1593    metadatafile = open(root + '.prj')
1594    metalines = metadatafile.readlines()
1595    metadatafile.close()
1596
1597    L = metalines[0].strip().split()
1598    assert L[0].strip().lower() == 'projection'
1599    projection = L[1].strip()                   #TEXT
1600
1601    L = metalines[1].strip().split()
1602    assert L[0].strip().lower() == 'zone'
1603    zone = int(L[1].strip())
1604
1605    L = metalines[2].strip().split()
1606    assert L[0].strip().lower() == 'datum'
1607    datum = L[1].strip()                        #TEXT
1608
1609    L = metalines[3].strip().split()
1610    assert L[0].strip().lower() == 'zunits'     #IGNORE
1611    zunits = L[1].strip()                       #TEXT
1612
1613    L = metalines[4].strip().split()
1614    assert L[0].strip().lower() == 'units'
1615    units = L[1].strip()                        #TEXT
1616
1617    L = metalines[5].strip().split()
1618    assert L[0].strip().lower() == 'spheroid'   #IGNORE
1619    spheroid = L[1].strip()                     #TEXT
1620
1621    L = metalines[6].strip().split()
1622    assert L[0].strip().lower() == 'xshift'
1623    false_easting = float(L[1].strip())
1624
1625    L = metalines[7].strip().split()
1626    assert L[0].strip().lower() == 'yshift'
1627    false_northing = float(L[1].strip())
1628
1629    #print false_easting, false_northing, zone, datum
1630
1631
1632    ###########################################
1633    #Read DEM data
1634
1635    datafile = open(basename_in + '.asc')
1636
1637    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
1638    lines = datafile.readlines()
1639    datafile.close()
1640
1641    if verbose: print 'Got', len(lines), ' lines'
1642
1643    ncols = int(lines[0].split()[1].strip())
1644    nrows = int(lines[1].split()[1].strip())
1645    xllcorner = float(lines[2].split()[1].strip())
1646    yllcorner = float(lines[3].split()[1].strip())
1647    cellsize = float(lines[4].split()[1].strip())
1648    NODATA_value = int(lines[5].split()[1].strip())
1649
1650    assert len(lines) == nrows + 6
1651
1652
1653    ##########################################
1654
1655
1656    if basename_out == None:
1657        netcdfname = root + '.dem'
1658    else:
1659        netcdfname = basename_out + '.dem'
1660
1661    if verbose: print 'Store to NetCDF file %s' %netcdfname
1662    # NetCDF file definition
1663    fid = NetCDFFile(netcdfname, 'w')
1664
1665    #Create new file
1666    fid.institution = 'Geoscience Australia'
1667    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
1668                      'of spatial point data'
1669
1670    fid.ncols = ncols
1671    fid.nrows = nrows
1672    fid.xllcorner = xllcorner
1673    fid.yllcorner = yllcorner
1674    fid.cellsize = cellsize
1675    fid.NODATA_value = NODATA_value
1676
1677    fid.zone = zone
1678    fid.false_easting = false_easting
1679    fid.false_northing = false_northing
1680    fid.projection = projection
1681    fid.datum = datum
1682    fid.units = units
1683
1684
1685    # dimension definitions
1686    fid.createDimension('number_of_rows', nrows)
1687    fid.createDimension('number_of_columns', ncols)
1688
1689    # variable definitions
1690    fid.createVariable('elevation', Float, ('number_of_rows',
1691                                            'number_of_columns'))
1692
1693    # Get handles to the variables
1694    elevation = fid.variables['elevation']
1695
1696    #Store data
1697    n = len(lines[6:])
1698    for i, line in enumerate(lines[6:]):
1699        fields = line.split()
1700        if verbose and i%((n+10)/10)==0:
1701            print 'Processing row %d of %d' %(i, nrows)           
1702
1703        elevation[i, :] = array([float(x) for x in fields])
1704
1705    fid.close()
1706
1707
1708
1709
1710
1711def ferret2sww(basename_in, basename_out = None,
1712               verbose = False,
1713               minlat = None, maxlat = None,
1714               minlon = None, maxlon = None,
1715               mint = None, maxt = None, mean_stage = 0,
1716               origin = None, zscale = 1,
1717               fail_on_NaN = True,
1718               NaN_filler = 0,
1719               elevation = None,
1720               inverted_bathymetry = False
1721               ): #FIXME: Bathymetry should be obtained
1722                                  #from MOST somehow.
1723                                  #Alternatively from elsewhere
1724                                  #or, as a last resort,
1725                                  #specified here.
1726                                  #The value of -100 will work
1727                                  #for the Wollongong tsunami
1728                                  #scenario but is very hacky
1729    """Convert 'Ferret' NetCDF format for wave propagation to
1730    sww format native to pyvolution.
1731
1732    Specify only basename_in and read files of the form
1733    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
1734    relative height, x-velocity and y-velocity, respectively.
1735
1736    Also convert latitude and longitude to UTM. All coordinates are
1737    assumed to be given in the GDA94 datum.
1738
1739    min's and max's: If omitted - full extend is used.
1740    To include a value min may equal it, while max must exceed it.
1741    Lat and lon are assuemd to be in decimal degrees
1742
1743    origin is a 3-tuple with geo referenced
1744    UTM coordinates (zone, easting, northing)
1745
1746    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
1747    which means that longitude is the fastest
1748    varying dimension (row major order, so to speak)
1749
1750    ferret2sww uses grid points as vertices in a triangular grid
1751    counting vertices from lower left corner upwards, then right
1752    """
1753
1754    import os
1755    from Scientific.IO.NetCDF import NetCDFFile
1756    from Numeric import Float, Int, Int32, searchsorted, zeros, array
1757    from Numeric import allclose, around
1758       
1759    precision = Float
1760
1761
1762    #Get NetCDF data
1763    if verbose: print 'Reading files %s_*.nc' %basename_in
1764    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
1765    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
1766    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
1767    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
1768
1769    if basename_out is None:
1770        swwname = basename_in + '.sww'
1771    else:
1772        swwname = basename_out + '.sww'
1773
1774    times = file_h.variables['TIME']
1775    latitudes = file_h.variables['LAT']
1776    longitudes = file_h.variables['LON']
1777
1778
1779
1780    #Precision used by most for lat/lon is 4 or 5 decimals
1781    e_lat = around(file_e.variables['LAT'][:], 5)
1782    e_lon = around(file_e.variables['LON'][:], 5)
1783
1784    #Check that files are compatible
1785    assert allclose(latitudes, file_u.variables['LAT'])
1786    assert allclose(latitudes, file_v.variables['LAT'])
1787    assert allclose(latitudes, e_lat)
1788
1789    assert allclose(longitudes, file_u.variables['LON'])
1790    assert allclose(longitudes, file_v.variables['LON'])
1791    assert allclose(longitudes, e_lon)   
1792
1793
1794
1795    if mint == None:
1796        jmin = 0
1797    else:
1798        jmin = searchsorted(times, mint)
1799
1800    if maxt == None:
1801        jmax=len(times)
1802    else:
1803        jmax = searchsorted(times, maxt)
1804
1805    if minlat == None:
1806        kmin=0
1807    else:
1808        kmin = searchsorted(latitudes, minlat)
1809
1810    if maxlat == None:
1811        kmax = len(latitudes)
1812    else:
1813        kmax = searchsorted(latitudes, maxlat)
1814
1815    if minlon == None:
1816        lmin=0
1817    else:
1818        lmin = searchsorted(longitudes, minlon)
1819
1820    if maxlon == None:
1821        lmax = len(longitudes)
1822    else:
1823        lmax = searchsorted(longitudes, maxlon)
1824
1825
1826
1827    times = times[jmin:jmax]
1828    latitudes = latitudes[kmin:kmax]
1829    longitudes = longitudes[lmin:lmax]
1830
1831
1832    if verbose: print 'cropping'
1833    zname = 'ELEVATION'
1834
1835   
1836    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
1837    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
1838    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
1839    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
1840
1841#    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
1842#        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
1843#    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
1844#        from Numeric import asarray
1845#        elevations=elevations.tolist()
1846#        elevations.reverse()
1847#        elevations=asarray(elevations)
1848#    else:
1849#        from Numeric import asarray
1850#        elevations=elevations.tolist()
1851#        elevations.reverse()
1852#        elevations=asarray(elevations)
1853#        'print hmmm'
1854
1855
1856
1857    #Get missing values
1858    nan_ha = file_h.variables['HA'].missing_value[0]
1859    nan_ua = file_u.variables['UA'].missing_value[0]
1860    nan_va = file_v.variables['VA'].missing_value[0]
1861    if hasattr(file_e.variables[zname],'missing_value'):
1862        nan_e  = file_e.variables[zname].missing_value[0]
1863    else:
1864        nan_e = None
1865
1866    #Cleanup
1867    from Numeric import sometrue
1868
1869    missing = (amplitudes == nan_ha)
1870    if sometrue (missing):
1871        if fail_on_NaN:
1872            msg = 'NetCDFFile %s contains missing values'\
1873                  %(basename_in+'_ha.nc')
1874            raise msg
1875        else:
1876            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
1877
1878    missing = (uspeed == nan_ua)
1879    if sometrue (missing):
1880        if fail_on_NaN:
1881            msg = 'NetCDFFile %s contains missing values'\
1882                  %(basename_in+'_ua.nc')
1883            raise msg
1884        else:
1885            uspeed = uspeed*(missing==0) + missing*NaN_filler
1886
1887    missing = (vspeed == nan_va)
1888    if sometrue (missing):
1889        if fail_on_NaN:
1890            msg = 'NetCDFFile %s contains missing values'\
1891                  %(basename_in+'_va.nc')
1892            raise msg
1893        else:
1894            vspeed = vspeed*(missing==0) + missing*NaN_filler
1895
1896
1897    missing = (elevations == nan_e)
1898    if sometrue (missing):
1899        if fail_on_NaN:
1900            msg = 'NetCDFFile %s contains missing values'\
1901                  %(basename_in+'_e.nc')
1902            raise msg
1903        else:
1904            elevations = elevations*(missing==0) + missing*NaN_filler
1905
1906    #######
1907
1908
1909
1910    number_of_times = times.shape[0]
1911    number_of_latitudes = latitudes.shape[0]
1912    number_of_longitudes = longitudes.shape[0]
1913
1914    assert amplitudes.shape[0] == number_of_times
1915    assert amplitudes.shape[1] == number_of_latitudes
1916    assert amplitudes.shape[2] == number_of_longitudes
1917
1918    if verbose:
1919        print '------------------------------------------------'
1920        print 'Statistics:'
1921        print '  Extent (lat/lon):'
1922        print '    lat in [%f, %f], len(lat) == %d'\
1923              %(min(latitudes.flat), max(latitudes.flat),
1924                len(latitudes.flat))
1925        print '    lon in [%f, %f], len(lon) == %d'\
1926              %(min(longitudes.flat), max(longitudes.flat),
1927                len(longitudes.flat))
1928        print '    t in [%f, %f], len(t) == %d'\
1929              %(min(times.flat), max(times.flat), len(times.flat))
1930
1931        q = amplitudes.flat
1932        name = 'Amplitudes (ha) [cm]'
1933        print %s in [%f, %f]' %(name, min(q), max(q))
1934
1935        q = uspeed.flat
1936        name = 'Speeds (ua) [cm/s]'
1937        print %s in [%f, %f]' %(name, min(q), max(q))
1938
1939        q = vspeed.flat
1940        name = 'Speeds (va) [cm/s]'
1941        print %s in [%f, %f]' %(name, min(q), max(q))
1942
1943        q = elevations.flat
1944        name = 'Elevations (e) [m]'
1945        print %s in [%f, %f]' %(name, min(q), max(q))
1946
1947
1948    #print number_of_latitudes, number_of_longitudes
1949    number_of_points = number_of_latitudes*number_of_longitudes
1950    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
1951
1952
1953    file_h.close()
1954    file_u.close()
1955    file_v.close()
1956    file_e.close()
1957
1958
1959    # NetCDF file definition
1960    outfile = NetCDFFile(swwname, 'w')
1961
1962    #Create new file
1963    outfile.institution = 'Geoscience Australia'
1964    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
1965                          %(basename_in + '_ha.nc',
1966                            basename_in + '_ua.nc',
1967                            basename_in + '_va.nc',
1968                            basename_in + '_e.nc')
1969
1970
1971    #For sww compatibility
1972    outfile.smoothing = 'Yes'
1973    outfile.order = 1
1974
1975    #Start time in seconds since the epoch (midnight 1/1/1970)
1976    outfile.starttime = starttime = times[0]
1977    times = times - starttime  #Store relative times
1978
1979    # dimension definitions
1980    outfile.createDimension('number_of_volumes', number_of_volumes)
1981
1982    outfile.createDimension('number_of_vertices', 3)
1983    outfile.createDimension('number_of_points', number_of_points)
1984
1985
1986    #outfile.createDimension('number_of_timesteps', len(times))
1987    outfile.createDimension('number_of_timesteps', len(times))
1988
1989    # variable definitions
1990    outfile.createVariable('x', precision, ('number_of_points',))
1991    outfile.createVariable('y', precision, ('number_of_points',))
1992    outfile.createVariable('elevation', precision, ('number_of_points',))
1993
1994    #FIXME: Backwards compatibility
1995    outfile.createVariable('z', precision, ('number_of_points',))
1996    #################################
1997
1998    outfile.createVariable('volumes', Int, ('number_of_volumes',
1999                                            'number_of_vertices'))
2000
2001    outfile.createVariable('time', precision,
2002                           ('number_of_timesteps',))
2003
2004    outfile.createVariable('stage', precision,
2005                           ('number_of_timesteps',
2006                            'number_of_points'))
2007
2008    outfile.createVariable('xmomentum', precision,
2009                           ('number_of_timesteps',
2010                            'number_of_points'))
2011
2012    outfile.createVariable('ymomentum', precision,
2013                           ('number_of_timesteps',
2014                            'number_of_points'))
2015
2016
2017    #Store
2018    from coordinate_transforms.redfearn import redfearn
2019    x = zeros(number_of_points, Float)  #Easting
2020    y = zeros(number_of_points, Float)  #Northing
2021
2022
2023    if verbose: print 'Making triangular grid'
2024    #Check zone boundaries
2025    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2026
2027    vertices = {}
2028    i = 0
2029    for k, lat in enumerate(latitudes):       #Y direction
2030        for l, lon in enumerate(longitudes):  #X direction
2031
2032            vertices[l,k] = i
2033
2034            zone, easting, northing = redfearn(lat,lon)
2035
2036            msg = 'Zone boundary crossed at longitude =', lon
2037            #assert zone == refzone, msg
2038            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2039            x[i] = easting
2040            y[i] = northing
2041            i += 1
2042
2043
2044    #Construct 2 triangles per 'rectangular' element
2045    volumes = []
2046    for l in range(number_of_longitudes-1):    #X direction
2047        for k in range(number_of_latitudes-1): #Y direction
2048            v1 = vertices[l,k+1]
2049            v2 = vertices[l,k]
2050            v3 = vertices[l+1,k+1]
2051            v4 = vertices[l+1,k]
2052
2053            volumes.append([v1,v2,v3]) #Upper element
2054            volumes.append([v4,v3,v2]) #Lower element
2055
2056    volumes = array(volumes)
2057
2058    if origin == None:
2059        zone = refzone
2060        xllcorner = min(x)
2061        yllcorner = min(y)
2062    else:
2063        zone = origin[0]
2064        xllcorner = origin[1]
2065        yllcorner = origin[2]
2066
2067
2068    outfile.xllcorner = xllcorner
2069    outfile.yllcorner = yllcorner
2070    outfile.zone = zone
2071
2072
2073    if elevation is not None:
2074        z = elevation
2075    else:
2076        if inverted_bathymetry:
2077            z = -1*elevations
2078        else:
2079            z = elevations
2080        #FIXME: z should be obtained from MOST and passed in here
2081
2082    from Numeric import resize
2083    z = resize(z,outfile.variables['z'][:].shape)
2084    outfile.variables['x'][:] = x - xllcorner
2085    outfile.variables['y'][:] = y - yllcorner
2086    outfile.variables['z'][:] = z
2087    outfile.variables['elevation'][:] = z  #FIXME HACK
2088    outfile.variables['time'][:] = times   #Store time relative
2089    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2090
2091
2092
2093    #Time stepping
2094    stage = outfile.variables['stage']
2095    xmomentum = outfile.variables['xmomentum']
2096    ymomentum = outfile.variables['ymomentum']
2097
2098    if verbose: print 'Converting quantities'
2099    n = len(times)
2100    for j in range(n):
2101        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2102        i = 0
2103        for k in range(number_of_latitudes):      #Y direction
2104            for l in range(number_of_longitudes): #X direction
2105                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2106                stage[j,i] = w
2107                h = w - z[i]
2108                xmomentum[j,i] = uspeed[j,k,l]/100*h
2109                ymomentum[j,i] = vspeed[j,k,l]/100*h
2110                i += 1
2111
2112
2113    if verbose:
2114        x = outfile.variables['x'][:]
2115        y = outfile.variables['y'][:]
2116        print '------------------------------------------------'
2117        print 'Statistics of output file:'
2118        print '  Name: %s' %swwname
2119        print '  Reference:'
2120        print '    Lower left corner: [%f, %f]'\
2121              %(xllcorner, yllcorner)
2122        print '    Start time: %f' %starttime
2123        print '  Extent:'
2124        print '    x [m] in [%f, %f], len(x) == %d'\
2125              %(min(x.flat), max(x.flat), len(x.flat))
2126        print '    y [m] in [%f, %f], len(y) == %d'\
2127              %(min(y.flat), max(y.flat), len(y.flat))
2128        print '    t [s] in [%f, %f], len(t) == %d'\
2129              %(min(times), max(times), len(times))
2130        print '  Quantities [SI units]:'
2131        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2132            q = outfile.variables[name][:].flat
2133            print '    %s in [%f, %f]' %(name, min(q), max(q))
2134
2135
2136
2137
2138    outfile.close()
2139
2140
2141
2142
def timefile2netcdf(filename, quantity_names = None):
    """Template for converting typical text files with time series to
    NetCDF tms file.


    The file format is assumed to be two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname: <filename>.txt is read
    and <filename>.tms is written.

    quantity_names: optional sequence of names for the value columns.
    Columns without a supplied name are called 'Attribute<i>'.
    """

    import time, calendar
    from Numeric import zeros, Float, alltrue
    from config import time_format
    from util import ensure_numeric


    #Read first line to determine the number of attributes per timestep
    fid = open(filename + '.txt')
    line = fid.readline()
    fid.close()

    fields = line.split(',')
    #NOTE: the original message never had the filename substituted in
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg

    try:
        starttime = calendar.timegm(time.strptime(fields[0], time_format))
    except ValueError:
        msg = 'First field in file %s must be' %filename
        msg += ' date-time with format %s.\n' %time_format
        msg += 'I got %s instead.' %fields[0]
        #Raise a real exception object (string exceptions are deprecated)
        raise Exception(msg)


    #Split values of the first line to establish dimensionality
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg


    #Read all lines and convert to relative times and attribute values
    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    N = len(lines)
    d = len(q)    #Number of attributes per timestep

    T = zeros(N, Float)       #Times relative to starttime
    Q = zeros((N, d), Float)  #Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        realtime = calendar.timegm(time.strptime(fields[0], time_format))

        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonously ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg


    #Create NetCDF tms file
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(filename + '.tms', 'w')


    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'


    #Reference point
    #Start time in seconds since the epoch (midnight 1/1/1970)
    #FIXME: Use Georef
    fid.starttime = starttime


    # dimension definitions
    #fid.createDimension('number_of_volumes', self.number_of_volumes)
    #fid.createDimension('number_of_vertices', 3)


    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', Float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        #Use the supplied name if present, otherwise a generic one.
        #quantity_names may be None (TypeError) or too short (IndexError).
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            name = 'Attribute%d'%i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()
2263
2264
def extent_sww(file_name):
    """
    Read in an sww file and report its extent.

    Input;
    file_name - the sww file

    Output;
    A list [min(x), max(x), min(y), max(y), min(stage), max(stage)]
    giving the spatial extent of the mesh vertices and the range of
    the stage values over all timesteps.
    """

    from Scientific.IO.NetCDF import NetCDFFile

    #Open the NetCDF file for reading
    fid = NetCDFFile(file_name, 'r')

    #Pull out vertex coordinates and the stage time series
    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stage = fid.variables['stage'][:]

    fid.close()

    return [min(xs), max(xs),
            min(ys), max(ys),
            min(stage.flat), max(stage.flat)]
2299
2300
def sww2domain(filename,boundary=None,t=None,\
               fail_if_NaN=True,NaN_filler=0\
               ,verbose = True,very_verbose = False):
    """
    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    Reconstruct a shallow_water Domain from an sww file, with the
    conserved quantities interpolated to time t.

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.

    fail_if_NaN: if True, raise when any quantity contains its
    missing_value marker; otherwise replace such entries with NaN_filler.
    """
    NaN=9.969209968386869e+036
    #initialise NaN.
    #Default no-data marker; overwritten below by each variable's own
    #missing_value attribute when one is present.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
    time = fid.variables['time']       #Timesteps
    if t is None:
        t = time[-1]                   #Default to the final timestep
    time_interp = get_time_interp(time,t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]             #x-coordinates of vertices
    y = fid.variables['y'][:]             #y-coordinates of vertices
    elevation = fid.variables['elevation']     #Elevation
    stage = fid.variables['stage']     #Water level
    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    coordinates=transpose(asarray([x.tolist(),y.tolist()]))  #Nx2 points

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    if verbose: print '    getting quantities'
    #Variables with a time dimension are conserved quantities (and get
    #interpolated to time t); the rest are static (e.g. elevation).
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity]=\
                  interpolated_quantity(fid.variables[quantity][:],time_interp)
        else: other_quantities.append(quantity)

    #Coordinates, connectivity and time are handled separately, not as
    #domain quantities
    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
#    From domain.Domain:
#    domain = Domain(coordinates, volumes,\
#                    conserved_quantities = conserved_quantities,\
#                    other_quantities = other_quantities,zone=zone,\
#                    xllcorner=xllcorner, yllcorner=yllcorner)

#   From shallow_water.Domain:
    coordinates=coordinates.tolist()
    volumes=volumes.tolist()
    #FIXME:should this be in mesh?(peter row)
    #Smoothed files store unique vertices; otherwise weed out duplicates
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)


    domain = Domain(coordinates, volumes, boundary)

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    #Model time restarts at 0; absolute start is shifted by t
    domain.starttime=float(starttime)+float(t)
    domain.time=0.0

    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
            #NOTE(review): NaN then keeps its value from the previous
            #iteration (or the default above)
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                #NOTE(review): raises a string (Python 2 style exception)
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                #Zero out no-data entries and fill with NaN_filler
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Fold flat per-vertex data back to one row per triangle
            X = resize(X,(len(X)/3,3))
        domain.set_quantity(quantity,X)
#
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            X = resize(X,(X.shape[0]/3,3))
        domain.set_quantity(quantity,X)
    fid.close()
    return domain
2443
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a stored quantity in time.

    saved_quantity: sequence/array indexed by timestep
    time_interp: (index, ratio) pair as produced by get_time_interp

    Returns the value at `index` when ratio is 0, otherwise the linear
    blend between timesteps `index` and `index+1`.
    """
    index, ratio = time_interp

    if ratio <= 0:
        #Exactly on a stored timestep - no blending required
        return saved_quantity[index]

    #Linear interpolation between the two neighbouring timesteps
    return (1 - ratio)*saved_quantity[index] + ratio*saved_quantity[index + 1]
2455
def get_time_interp(time, t=None):
    """Find the index and ratio for interpolating in a time vector.

    time: monotonically increasing sequence of time values
    t: target time; defaults to the last entry of time

    Returns (index, ratio) such that the value at t is
    (1-ratio)*Q[index] + ratio*Q[index+1]; ratio == 0 means the value
    at index can be used directly.

    Raises ValueError if t lies outside [time[0], time[-1]].
    (Borrowed from previous pyvolution code.)
    """
    if t is None:
        #Default to the final timestep; no interpolation needed
        t = time[-1]
        index = -1
        ratio = 0.
    else:
        T = time
        tau = t
        index = 0
        msg = 'Time interval derived from file %s [%s:%s]'\
            %('FIXMEfilename', T[0], T[-1])
        msg += ' does not match model time: %s' %tau
        #Raise proper exception objects (the original raised bare
        #strings, which the language no longer supports)
        if tau < time[0]: raise ValueError(msg)
        if tau > time[-1]: raise ValueError(msg)

        #Locate the interval containing tau
        while tau > time[index]: index += 1
        while tau < time[index]: index -= 1

        if tau == time[index]:
            #Protect against case where tau == time[-1] (last time)
            # - also works in general when tau == time[i]
            ratio = 0
        else:
            #t is now between index and index+1
            ratio = (tau - time[index])/(time[index+1] - time[index])
    return (index, ratio)
2482
2483
def weed(coordinates, volumes, boundary=None):
    """Remove duplicate points and renumber volumes (and optionally
    boundary segments) to reference the weeded point list.

    coordinates: list (or Numeric array) of [x, y] points, possibly
                 containing duplicates
    volumes: list (or Numeric array) of index triplets into coordinates
    boundary: optional dict mapping (index0, index1) segments to labels

    Returns (coordinates, volumes, boundary) using unique points only.
    """
    #Accept Numeric arrays as well as lists.  (The original check
    #type(x)=='array' compared a type object to a string, so it was
    #never true and arrays were never converted.)
    if hasattr(coordinates, 'tolist'):
        coordinates = coordinates.tolist()
    if hasattr(volumes, 'tolist'):
        volumes = volumes.tolist()

    #Record the first occurrence of each distinct point, and the
    #coordinate tuple every original index refers to
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point not in point_dict:
            point_dict[point] = i
        same_point[i] = point
        #same_point lets us map any old index to its point, and from
        #there to the new index via point_dict

    #Rebuild the coordinate list with unique points and assign new
    #consecutive indices
    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    #Re-map volume vertex indices to the weeded numbering
    #(negative indices are sentinels and left untouched)
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    #Re-map boundary segments, if supplied
    if boundary is not None:
        new_boundary = {}
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)
            #Currently the first label seen for a segment (in either
            #orientation) wins.
            if (point0, point1) not in new_boundary and \
               (point1, point0) not in new_boundary:
                new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
2539
2540
2541
2542
2543
2544
2545
2546
2547
2548
2549###############################################
2550#OBSOLETE STUFF
2551#Native checkpoint format.
2552#Information needed to recreate a state is preserved
2553#FIXME: Rethink and maybe use netcdf format
def cpt_variable_writer(filename, t, v0, v1, v2):
    """Store all conserved quantities to an ASCII checkpoint file.

    Writes one line per triangle: the N values of v0, then v1, then v2,
    each formatted as '%.16e '.
    """

    rows, cols = v0.shape

    outname = create_filename(filename, 'cpt', rows, t)
    #print 'Writing to %s' %outname

    fid = open(outname, 'w')
    for row in range(rows):
        #Emit the three quantity blocks back to back on one line
        for quantity in (v0, v1, v2):
            for col in range(cols):
                fid.write('%.16e ' %quantity[row,col])

        fid.write('\n')
    fid.close()
2574
2575
def cpt_variable_reader(filename, t, v0, v1, v2):
    """Read all conserved quantities for time t from an ASCII
    checkpoint file into the (M, N) arrays v0, v1, v2 (in place).

    Counterpart of cpt_variable_writer, which writes N values of v0,
    then N of v1, then N of v2 on each line.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Reading from %s' %FN

    fid = open(FN)


    for i in range(M):
        values = fid.readline().split() #Get one line

        for j in range(N):
            #Offsets follow the writer's layout: v0 block, v1 block,
            #v2 block.  (The original hard-coded offsets 3 and 6,
            #which were only correct for N == 3.)
            v0[i,j] = float(values[j])
            v1[i,j] = float(values[N+j])
            v2[i,j] = float(values[2*N+j])

    fid.close()
def cpt_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z,z,z coordinates of triangles constituting the bed
    elevation.
    FIXME: Not in use pt

    NOTE(review): the debug print and sys.exit() below terminate the
    process before any output is written, so this function is inert -
    consistent with the FIXME above.
    """

    M, N = v0.shape


    print X0
    import sys; sys.exit()
    FN = create_filename(filename, 'cpt', M)
    print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        for j in range(2):
            fid.write('%.16e ' %X0[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v0[i,j])       #z,z,z,

        for j in range(2):
            fid.write('%.16e ' %X1[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v1[i,j])

        for j in range(2):
            fid.write('%.16e ' %X2[i,j])   #x, y
        for j in range(N):
            fid.write('%.16e ' %v2[i,j])

        fid.write('\n')
    fid.close()
2631
2632
2633
2634#Function for storing out to e.g. visualisation
2635#FIXME: Do we want this?
2636#FIXME: Not done yet for this version
def dat_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z coordinates of triangles constituting the bed elevation.

    For each triangle, writes x, y and z for each of its three corners
    on one line, each value formatted as '%f '.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        #One (x, y, z) triple per corner
        for X, v in ((X0, v0), (X1, v1), (X2, v2)):
            fid.write('%f ' %X[i,0])   #x
            fid.write('%f ' %X[i,1])   #y
            fid.write('%f ' %v[i,0])   #z

        fid.write('\n')
    fid.close()
2662
2663
2664
def dat_variable_writer(filename, t, v0, v1, v2):
    """Store water height to file.

    Writes one line per triangle holding the first component of v0, v1
    and v2, each formatted as '%.4f '.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M, t)
    #print 'Writing to %s' %FN

    fid = open(FN, 'w')
    for i in range(M):
        line = '%.4f %.4f %.4f \n' %(v0[i,0], v1[i,0], v2[i,0])
        fid.write(line)
    fid.close()
2682
2683
def read_sww(filename):
    """Read sww Net CDF file containing Shallow Water Wave simulation

    The integer array volumes is of shape Nx3 where N is the number of
    triangles in the mesh.

    Each entry in volumes is an index into the x,y arrays (the location).

    Quantities stage, elevation, xmomentum and ymomentum are all in arrays of dimensions
    number_of_timesteps, number_of_points.

    The momentum is not always stored.

    NOTE(review): this function assigns the variables to locals but
    never returns or uses them, and never closes fid - it appears to
    be an obsolete stub (see the OBSOLETE STUFF section above it).
    """
    from Scientific.IO.NetCDF import NetCDFFile
    print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
#latitude, longitude
    # Get the variables as Numeric arrays
    x = fid.variables['x']             #x-coordinates of vertices
    y = fid.variables['y']             #y-coordinates of vertices
    z = fid.variables['elevation']     #Elevation
    time = fid.variables['time']       #Timesteps
    stage = fid.variables['stage']     #Water level
    #xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    #ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    volumes = fid.variables['volumes'] #Connectivity
2712
2713
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.

    basename_in: root name of input file (<basename_in>.dem is read)
    stencil: 2D weighting array applied to each window of the input grid
    cellsize_new: cellsize of the decimated grid; assumed to be an
        integer multiple of the input cellsize
    basename_out: optional root name for output; defaults to
        <basename_in>_<cellsize_new>.dem
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out == None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    #NOTE: integer division throughout (Python 2 semantics)
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                           'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    #Origin shifted to the centre of the first stencil window
    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev =  zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        #Write one decimated row at a time
        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
2840
2841
2842   
def sww2asc_obsolete(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            verbose = False,
            origin = None):
    """Read SWW file and convert to Digital Elevation model format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    if quantity is given, out values from quantity otherwise default to
    elevation.  'depth' is computed as stage - elevation.

    if timestep (an index) is given, output quantity at that timestep

    if reduction is given use that to reduce quantity over all timesteps
    (default: max).
    """
    from Numeric import array, Float, concatenate, NewAxis, zeros
    from utilities.polygon import inside_polygon

    #FIXME: Should be variable
    datum = 'WGS84'
    false_easting = 500000
    false_northing = 10000000

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    ascfile = basename_out + '.asc'
    prjfile = basename_out + '.prj'


    if verbose: print ('Reading from %s' %swwfile)
    #Read sww file
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    ymin = min(y); ymax = max(y)
    xmin = min(x); xmax = max(x)

    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]


    #Get quantity and reduce if applicable
    if verbose: print ('Reading quantity %s' %quantity)

    if quantity.lower() == 'depth':
        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
    else:
        q = fid.variables[quantity][:]


    if len(q.shape) == 2:
        #Time dependent quantity: pick requested timestep, or reduce
        #over all timesteps with the supplied reduction operator.
        if timestep is not None:
            q = q[timestep, :]
        else:
            if verbose: print ('Reducing quantity %s' %quantity)
            q_reduced = zeros( number_of_points, Float )

            for k in range(number_of_points):
                q_reduced[k] = reduction( q[:,k] )

            q = q_reduced

    #Now q has dimension: number_of_points

    #Create grid and update xll/yll corner and x,y
    if verbose: print ('Creating grid')
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1

    #Move grid origin to the data extent's lower left corner and make
    #vertex coordinates relative to it
    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2

    grid_points = zeros ( (ncols*nrows, 2), Float )

    for i in xrange(nrows):
        yg = i*cellsize
        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

            grid_points[k,0] = xg
            grid_points[k,1] = yg

    #Interpolate
    from least_squares import Interpolation


    #FIXME: This should be done with precrop = True, otherwise it'll
    #take forever. With expand_search  set to False, some grid points might
    #miss out....

    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
                           precrop = False, expand_search = True,
                           verbose = verbose)

    #Interpolate using quantity values
    if verbose: print ('Interpolating')
    grid_values = interp.interpolate(q).flat

    #Write
    #Write prj file
    if verbose: print ('Writing %s' %prjfile)
    prjid = open(prjfile, 'w')
    prjid.write('Projection    %s\n' %'UTM')
    prjid.write('Zone          %d\n' %zone)
    prjid.write('Datum         %s\n' %datum)
    prjid.write('Zunits        NO\n')
    prjid.write('Units         METERS\n')
    prjid.write('Spheroid      %s\n' %datum)
    prjid.write('Xshift        %d\n' %false_easting)
    prjid.write('Yshift        %d\n' %false_northing)
    prjid.write('Parameters\n')
    prjid.close()



    if verbose: print ('Writing %s' %ascfile)
    NODATA_value = -9999

    ascid = open(ascfile, 'w')

    ascid.write('ncols         %d\n' %ncols)
    ascid.write('nrows         %d\n' %nrows)
    ascid.write('xllcorner     %d\n' %newxllcorner)
    ascid.write('yllcorner     %d\n' %newyllcorner)
    ascid.write('cellsize      %f\n' %cellsize)
    ascid.write('NODATA_value  %d\n' %NODATA_value)


    #Get bounding polygon from mesh; only grid points inside it get values
    P = interp.mesh.get_boundary_polygon()
    inside_indices = inside_polygon(grid_points, P)

    #Build constant-time membership lookup once.
    #(Scanning inside_indices per grid cell would be quadratic.)
    inside_lookup = {}
    for idx in inside_indices:
        inside_lookup[int(idx)] = None

    for i in range(nrows):
        if verbose and i%((nrows+10)//10)==0:
            print ('Doing row %d of %d' %(i, nrows))

        for j in range(ncols):
            #ASC data is written top row (north) first
            index = (nrows-i-1)*ncols+j

            if index in inside_lookup:
                ascid.write('%f ' %grid_values[index])
            else:
                ascid.write('%d ' %NODATA_value)

        ascid.write('\n')

    #Close
    ascid.close()
    fid.close()
3057
def asc_csiro2sww(bath_file_in, verbose=False):
    """Convert CSIRO ascii bathymetry to sww format.

    NOTE(review): unimplemented placeholder -- currently does nothing
    with bath_file_in or verbose.
    """
    pass
3060
3061def _read_asc(filename, verbose=False):
3062    """Read esri file from the following ASCII format (.asc)
3063
3064    Example:
3065
3066    ncols         3121
3067    nrows         1800
3068    xllcorner     722000
3069    yllcorner     5893000
3070    cellsize      25
3071    NODATA_value  -9999
3072    138.3698 137.4194 136.5062 135.5558 ..........
3073    """
3074
3075    datafile = open(filename)
3076
3077    if verbose: print 'Reading DEM from %s' %(filename)
3078    lines = datafile.readlines()
3079    datafile.close()
3080
3081    if verbose: print 'Got', len(lines), ' lines'
3082
3083    ncols = int(lines.pop(0).split()[1].strip())
3084    nrows = int(lines.pop(0).split()[1].strip())
3085    xllcorner = float(lines.pop(0).split()[1].strip())
3086    yllcorner = float(lines.pop(0).split()[1].strip())
3087    cellsize = float(lines.pop(0).split()[1].strip())
3088    NODATA_value = int(lines.pop(0).split()[1].strip())
3089   
3090    assert len(lines) == nrows
3091   
3092    #Store data
3093    grid = []
3094   
3095    n = len(lines)
3096    for i, line in enumerate(lines):
3097        cells = line.split()
3098        assert len(cells) == ncols
3099        grid.append(array([float(x) for x in cells]))
3100    grid = array(grid)
3101
3102    return xllcorner, yllcorner, cellsize, NODATA_value, grid
3103   
Note: See TracBrowser for help on using the repository browser.